diff --git a/dist/restore-only/index.js b/dist/restore-only/index.js index c42c938..d1fcc56 100644 --- a/dist/restore-only/index.js +++ b/dist/restore-only/index.js @@ -7792,7 +7792,7 @@ exports.isTokenCredential = isTokenCredential; Object.defineProperty(exports, "__esModule", ({ value: true })); -var uuid = __nccwpck_require__(3534); +var uuid = __nccwpck_require__(2048); var util = __nccwpck_require__(9023); var tslib = __nccwpck_require__(470); var xml2js = __nccwpck_require__(758); @@ -14157,652 +14157,6 @@ var __createBinding; }); -/***/ }), - -/***/ 3534: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(3417)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(2855)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(7974)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(6165)); - -var _nil = _interopRequireDefault(__nccwpck_require__(7441)); - -var _version = _interopRequireDefault(__nccwpck_require__(5962)); - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 7370: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 7441: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 6221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ - - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 8689: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 6299: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 8821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 9651: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; -} - -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 3417: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 2855: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _md = _interopRequireDefault(__nccwpck_require__(7370)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 8132: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; - } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 7974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - - return buf; - } - - return (0, _stringify.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 6165: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _sha = _interopRequireDefault(__nccwpck_require__(8821)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 1690: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(8689)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 5962: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); -} - -var _default = version; -exports["default"] = _default; - /***/ }), /***/ 5862: @@ -42639,6 +41993,619 @@ var __createBinding; }); +/***/ }), + +/***/ 9469: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(6251); +const extend = __nccwpck_require__(3860); +const resource_stream_1 = __nccwpck_require__(7618); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! 
Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. 
+ if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); + } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(2203); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + this._otherArgs = []; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; + } + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map + +/***/ }), + +/***/ 7635: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(2203); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. 
+ * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. 
+ if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9977: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(538); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = new Queue(); + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.size > 0) { + queue.dequeue()(); + } + }; + + const run = async (fn, resolve, ...args) => { + activeCount++; + + const result = (async () => fn(...args))(); + + resolve(result); + + try { + await result; + } catch {} + + next(); + }; + + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); + + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. + await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); + } + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); + }); + + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); + + return generator; +}; + +module.exports = pLimit; + + /***/ }), /***/ 9750: @@ -42751,7 +42718,7 @@ exports.ContextAPI = ContextAPI; */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DiagAPI = void 0; -const ComponentLogger_1 = __nccwpck_require__(7723); +const ComponentLogger_1 = __nccwpck_require__(104); const logLevelLogger_1 = __nccwpck_require__(3514); const types_1 = __nccwpck_require__(2573); const global_utils_1 = __nccwpck_require__(9923); @@ -43473,7 +43440,7 @@ exports.diag = diag_1.DiagAPI.instance(); /***/ }), -/***/ 7723: +/***/ 104: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -50282,6 +50249,531 @@ class ReflectionTypeCheck { exports.ReflectionTypeCheck = ReflectionTypeCheck; +/***/ }), + +/***/ 8662: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports["default"] = once; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7413: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var eventTargetShim = __nccwpck_require__(6577); + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map + + +/***/ }), + +/***/ 5183: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(8611)); +const https = __importStar(__nccwpck_require__(5692)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 8894: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const http = __importStar(__nccwpck_require__(8611)); +const https_1 = __nccwpck_require__(5692); +__exportStar(__nccwpck_require__(5183), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. 
+ if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 
'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6251: +/***/ ((module) => { + +"use strict"; + + +const arrify = value => { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +}; + +module.exports = arrify; + + +/***/ }), + +/***/ 5195: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Packages +var retrier = __nccwpck_require__(5546); + +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; + + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } + + op = retrier.operation(options); + + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) + + function bail(err) { + reject(err || new Error('Aborted')); + } + + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } + + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } + } + + function runAttempt(num) { + var val; + + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } + + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } + + op.attempt(runAttempt); + } + + return new Promise(run); +} + +module.exports = retry; + + /***/ }), /***/ 1324: @@ -50793,6 +51285,3093 @@ function range(a, b, str) { } +/***/ }), + +/***/ 8793: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray + +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} + +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 + +function getLens (b64) { + var len = b64.length + + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} + + +/***/ }), + +/***/ 1259: +/***/ (function(module) { + +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.2.1 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2025 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // The index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if ( true && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); + + /***/ }), /***/ 4691: @@ -51001,6 +54580,55 @@ function expand(str, isTop) { +/***/ }), + +/***/ 9732: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*jshint node:true */ + +var Buffer = (__nccwpck_require__(181).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(181).SlowBuffer); + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. 
+ if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; + + /***/ }), /***/ 5630: @@ -51236,6 +54864,850 @@ var isArray = Array.isArray || function (xs) { }; +/***/ }), + +/***/ 6110: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? 
http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
+ */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 897: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(744); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 2830: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. 
+ */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(6110); +} else { + module.exports = __nccwpck_require__(5108); +} + + +/***/ }), + +/***/ 5108: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(2018); +const util = __nccwpck_require__(9023); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(1450); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? 
c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + /***/ }), /***/ 2710: @@ -51350,6 +55822,15373 @@ DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { }; +/***/ }), + +/***/ 9112: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var stream = __nccwpck_require__(6131) +var eos = __nccwpck_require__(1424) +var inherits = __nccwpck_require__(9598) +var shift = __nccwpck_require__(4633) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 325: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Buffer = (__nccwpck_require__(3058).Buffer); + +var getParamBytesForAlg = __nccwpck_require__(5028); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if 
(signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; + + +/***/ }), + +/***/ 5028: +/***/ ((module) => { + +"use strict"; + + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; + + +/***/ }), + +/***/ 1424: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(5560); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 6577: +/***/ ((module, exports) => { + +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. 
+ */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. 
+ * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. 
+ for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." 
+ ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. 
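+ *
+ * Event attribute properties such as `ontimeout` on the returned constructors are
+ * wired up by `defineEventAttribute` above. A minimal illustrative sketch (the event
+ * name "timeout" is only an example):
+ *
+ *     class Timer extends EventTarget("timeout") {}
+ *     const t = new Timer()
+ *     t.ontimeout = () => console.log("timed out")
+ *     t.dispatchEvent({ type: "timeout" })   // logs "timed out"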
+ * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. 
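+ *
+ * Illustrative sketch of the contract (the event name "save" is only an example):
+ *
+ *     const target = new EventTarget()
+ *     target.addEventListener("save", e => e.preventDefault(), { once: true })
+ *     target.dispatchEvent({ type: "save", cancelable: true })   // false (canceled)
+ *     target.dispatchEvent({ type: "save", cancelable: true })   // true (once-listener already removed)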
+ */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + + +/***/ }), + +/***/ 3860: +/***/ ((module) => { + +"use strict"; + + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. 
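+ // Illustrative outcomes of this check (not from the upstream source):
+ //   isPlainObject({})                   -> true
+ //   isPlainObject(Object.create(null))  -> true   (no constructor at all)
+ //   isPlainObject(new Date())           -> false  ('[object Date]')
+ //   isPlainObject([])                   -> false  ('[object Array]')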
+ var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
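+ // Illustrative use of this deep-merge branch (not part of the upstream source):
+ //   extend(true, { a: { x: 1 }, list: [1] }, { a: { y: 2 }, list: [2, 3] })
+ //   // -> { a: { x: 1, y: 2 }, list: [2, 3] }
+ // A plain '__proto__' key is written via setProperty above as an own property,
+ // so a merge cannot pollute Object.prototype.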
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; + + +/***/ }), + +/***/ 9741: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(9433); +const XMLParser = __nccwpck_require__(9844); +const XMLBuilder = __nccwpck_require__(659); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 812: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} + +module.exports = getIgnoreAttributesFn + +/***/ }), + +/***/ 7019: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 9433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(7019); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested 
function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
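+ // Illustrative usage of exports.validate above (published as XMLValidator.validate;
+ // inputs chosen only as examples):
+ //   validate("<root><a>1</a></root>")                                    // true
+ //   validate("<root enabled></root>")                                    // { err: { code: "InvalidAttr", ... } }
+ //   validate("<root enabled></root>", { allowBooleanAttributes: true })  // true
+ //   validate("<root><a>1</root>")                                        // { err: { code: "InvalidTag", msg: "Expected closing tag 'a' ...", line: 1, col: ... } }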
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
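+ * For illustration (examples, not from the upstream source), validateAttributeString
+ * below uses this to reject, among others:
+ *   <a x="1"x="2">   no space before the second attribute
+ *   <a x="1" x="2">  attribute 'x' is repeated
+ *   <a checked>      boolean attribute, unless options.allowBooleanAttributes is set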
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs 
= __nccwpck_require__(6378); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes === true || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0, []).val; + } +}; + +Builder.prototype.j2x = function(jObj, level, ajPath) { + let attrStr = ''; + let val = ''; + const jPath = ajPath.join('.') + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key === this.options.cdataPropName) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr && !this.ignoreAttributesFn(attr, jPath)) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + } else if (!attr) { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1, ajPath.concat(key)); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level, ajPath) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level, ajPath) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level, ajPath) { + const result = this.j2x(object, level + 1, ajPath.concat(key)); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
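+ // Illustrative usage of this builder (published as XMLBuilder; shown without format: true):
+ //   const { XMLBuilder } = require("fast-xml-parser");
+ //   const builder = new XMLBuilder({ ignoreAttributes: false, attributeNamePrefix: "@_" });
+ //   builder.build({ note: { "@_lang": "en", to: "Tove", body: "Hi" } });
+ //   // -> '<note lang="en"><to>Tove</to><body>Hi</body></note>'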
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { + +const EOL = "\n"; + +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; + + +/***/ }), + +/***/ 151: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(7019); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; + +/***/ }), + +/***/ 3017: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +///@ts-check + +const util = __nccwpck_require__(7019); +const xmlNode = __nccwpck_require__(9307); +const readDocType = __nccwpck_require__(151); +const toNumber = __nccwpck_require__(6496); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: 
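+ // Illustrative use of the parser defaultOptions above (published as: new XMLParser(options)):
+ //   const { XMLParser } = require("fast-xml-parser");
+ //   const parser = new XMLParser({ ignoreAttributes: false, attributeNamePrefix: "@_" });
+ //   parser.parse('<item id="7">42</item>');
+ //   // -> { item: { "#text": 42, "@_id": "7" } }
+ //   // tag text is parsed to the number 42 (parseTagValue), while the attribute stays
+ //   // the string "7" because parseAttributeValue defaults to false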
/&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag 
scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = 
currentNode.child.length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else 
if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 9844: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(4769); +const OrderedObjParser = __nccwpck_require__(3017); +const { prettify} = __nccwpck_require__(7594); +const validator = __nccwpck_require__(9433); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 7594: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; + + +/***/ }), + +/***/ 9307: +/***/ ((module) => { + +"use strict"; + + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; + +/***/ }), + +/***/ 7506: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +exports.defaultErrorRedactor = defaultErrorRedactor; +const url_1 = __nccwpck_require__(7016); +const util_1 = __nccwpck_require__(3155); +const extend_1 = __importDefault(__nccwpck_require__(3860)); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. 
+ * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. + } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/i.test(key)) { + headers[key] = REDACT; + } + // any casing of `Authorization` + if (/^authorization$/i.test(key)) { + headers[key] = REDACT; + } + // anything containing secret, such as 'client secret' + if (/secret/i.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/i.test(text) || + /assertion=/i.test(text) || + /secret/i.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + if ('client_secret' in obj) { + obj['client_secret'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + if (url.searchParams.has('client_secret')) { + url.searchParams.set('client_secret', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid 
URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +//# sourceMappingURL=common.js.map + +/***/ }), + +/***/ 6010: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(3860)); +const https_1 = __nccwpck_require__(5692); +const node_fetch_1 = __importDefault(__nccwpck_require__(6705)); +const querystring_1 = __importDefault(__nccwpck_require__(3480)); +const is_stream_1 = __importDefault(__nccwpck_require__(6543)); +const url_1 = __nccwpck_require__(7016); +const common_1 = __nccwpck_require__(7506); +const retry_1 = __nccwpck_require__(2789); +const stream_1 = __nccwpck_require__(2203); +const uuid_1 = __nccwpck_require__(3187); +const interceptor_1 = __nccwpck_require__(5608); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + _Gaxios_instances.add(this); + this.agentCache = new Map(); + this.defaults = defaults || {}; + this.interceptors = { + request: new interceptor_1.GaxiosInterceptorManager(), + response: new interceptor_1.GaxiosInterceptorManager(), + }; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); + return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _b; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? 
void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_b = err.config) === null || _b === void 0 ? void 0 : _b.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } + /** + * Creates an async generator that yields the pieces of a multipart/related request body. + * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive + * multipart/related requests are not currently supported. + * + * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. + * @param {string} boundary the boundary string to be placed between each part. 
+ */ + async *getMultipartRequest(multipartOptions, boundary) { + const finale = `--${boundary}--`; + for (const currentPart of multipartOptions) { + const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; + const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; + yield preamble; + if (typeof currentPart.content === 'string') { + yield currentPart.content; + } + else { + yield* currentPart.content; + } + yield '\r\n'; + } + yield finale; + } +} +exports.Gaxios = Gaxios; +_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { + var _b, _c; + const candidate = new url_1.URL(url); + const noProxyList = [...noProxy]; + const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? void 0 : _c.split(',')) || []; + for (const rule of noProxyEnvList) { + noProxyList.push(rule.trim()); + } + for (const rule of noProxyList) { + // Match regex + if (rule instanceof RegExp) { + if (rule.test(candidate.toString())) { + return false; + } + } + // Match URL + else if (rule instanceof url_1.URL) { + if (rule.origin === candidate.origin) { + return false; + } + } + // Match string regex + else if (rule.startsWith('*.') || rule.startsWith('.')) { + const cleanedRule = rule.replace(/^\*\./, '.'); + if (candidate.hostname.endsWith(cleanedRule)) { + return false; + } + } + // Basic string match + else if (rule === candidate.origin || + rule === candidate.hostname || + rule === candidate.href) { + return false; + } + } + return true; +}, _Gaxios_applyRequestInterceptors = +/** + * Applies the request interceptors. The request interceptors are applied after the + * call to prepareRequest is completed. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyRequestInterceptors(options) { + let promiseChain = Promise.resolve(options); + for (const interceptor of this.interceptors.request.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_applyResponseInterceptors = +/** + * Applies the response interceptors. The response interceptors are applied after the + * call to request is made. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyResponseInterceptors(response) { + let promiseChain = Promise.resolve(response); + for (const interceptor of this.interceptors.response.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_prepareRequest = +/** + * Validates the options, merges them with defaults, and prepare request. + * + * @param options The original options passed from the client. 
+ * @returns Prepared options, ready to make a request + */ +async function _Gaxios_prepareRequest(options) { + var _b, _c, _d, _e; + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl.toString() + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.multipart === undefined && opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + else if (opts.multipart && opts.multipart.length > 0) { + // note: once the minimum version reaches Node 16, + // this can be replaced with randomUUID() function from crypto + // and the dependency on UUID removed + const boundary = (0, uuid_1.v4)(); + opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + const bodyStream = new stream_1.PassThrough(); + opts.body = bodyStream; + (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = opts.proxy || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || + ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? 
void 0 : _e.http_proxy); + const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); + if (opts.agent) { + // don't do any of the following options - use the user-provided agent. + } + else if (proxy && urlMayUseProxy) { + const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + opts.agent = new HttpsProxyAgent(proxy, { + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; +}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { + __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(3669)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); + return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); +}; +/** + * A cache for the lazily-loaded proxy agent. + * + * Should use {@link Gaxios[#getProxyAgent]} to retrieve. + */ +// using `import` to dynamically import the types here +_Gaxios_proxyAgent = { value: void 0 }; +//# sourceMappingURL=gaxios.js.map + +/***/ }), + +/***/ 7003: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +exports.request = request; +const gaxios_1 = __nccwpck_require__(6010); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(7506); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); +__exportStar(__nccwpck_require__(5608), exports); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 5608: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosInterceptorManager = void 0; +/** + * Class to manage collections of GaxiosInterceptors for both requests and responses. + */ +class GaxiosInterceptorManager extends Set { +} +exports.GaxiosInterceptorManager = GaxiosInterceptorManager; +//# sourceMappingURL=interceptor.js.map + +/***/ }), + +/***/ 2789: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = getRetryConfig; +async function getRetryConfig(err) { + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 
3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + config.retryDelayMultiplier = config.retryDelayMultiplier + ? config.retryDelayMultiplier + : 2; + config.timeOfFirstRequest = config.timeOfFirstRequest + ? config.timeOfFirstRequest + : Date.now(); + config.totalTimeout = config.totalTimeout + ? config.totalTimeout + : Number.MAX_SAFE_INTEGER; + config.maxRetryDelay = config.maxRetryDelay + ? config.maxRetryDelay + : Number.MAX_SAFE_INTEGER; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 408 - Retry ("Request Timeout") + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [408, 408], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + const delay = getNextRetryDelay(config); + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
+ if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +/** + * Gets the delay to wait before the next retry. + * + * @param {RetryConfig} config The current set of retry options + * @returns {number} the amount of ms to wait before the next retry attempt. + */ +function getNextRetryDelay(config) { + var _a; + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1 / 2) * 1000) + const calculatedDelay = retryDelay + + ((Math.pow(config.retryDelayMultiplier, config.currentRetryAttempt) - 1) / + 2) * + 1000; + const maxAllowableDelay = config.totalTimeout - (Date.now() - config.timeOfFirstRequest); + return Math.min(calculatedDelay, maxAllowableDelay, config.maxRetryDelay); +} +//# sourceMappingURL=retry.js.map + +/***/ }), + +/***/ 3155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6495); +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 3187: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6698)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6272)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(8485)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(742)); + +var _nil = _interopRequireDefault(__nccwpck_require__(8212)); + +var _version = _interopRequireDefault(__nccwpck_require__(6651)); + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4386)); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 1979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 3968: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 8212: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 9884: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 956: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 4566: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5398: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 4386: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6698: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6272: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _md = _interopRequireDefault(__nccwpck_require__(1979)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2781: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(4386); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 8485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(3968)); + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 742: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5398)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 5913: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(956)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6651: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCE_LINUX_BIOS_PATHS = void 0; +exports.isGoogleCloudServerless = isGoogleCloudServerless; +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +exports.isGoogleComputeEngine = isGoogleComputeEngine; +exports.detectGCPResidency = detectGCPResidency; +const fs_1 = __nccwpck_require__(9896); +const os_1 = __nccwpck_require__(857); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +/** + * Determines if the process is running on Google Cloud Platform. 
+ * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +//# sourceMappingURL=gcp-residency.js.map + +/***/ }), + +/***/ 3046: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.gcpResidencyCache = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +exports.instance = instance; +exports.project = project; +exports.universe = universe; +exports.bulk = bulk; +exports.isAvailable = isAvailable; +exports.resetIsAvailableCache = resetIsAvailableCache; +exports.getGCPResidency = getGCPResidency; +exports.setGCPResidency = setGCPResidency; +exports.requestTimeout = requestTimeout; +const gaxios_1 = __nccwpck_require__(7003); +const jsonBigint = __nccwpck_require__(4826); +const gcp_residency_1 = __nccwpck_require__(381); +const logger = __nccwpck_require__(1577); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +const log = logger.log('gcp metadata'); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. 
+ */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const req = { + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }; + log.info('instance request %j', req); + const res = await requestMethod(req); + log.info('instance metadata is %s', res.data); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header. Expected '${exports.HEADER_VALUE}', got ${res.headers[exports.HEADER_NAME.toLowerCase()] ? `'${res.headers[exports.HEADER_NAME.toLowerCase()]}'` : 'no header'}`); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; +} +async function fastFailMetadataRequest(options) { + var _a; + const secondaryOptions = { + ...options, + url: (_a = options.url) === null || _a === void 0 ? void 0 : _a.toString().replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. 
+ // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe-domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } +} +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; +} +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; +} +__exportStar(__nccwpck_require__(381), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(4434); +const gaxios_1 = __nccwpck_require__(7003); +const transporters_1 = __nccwpck_require__(7633); +const util_1 = __nccwpck_require__(7870); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.apiKey = opts.apiKey; + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. + * + * @expiremental + */ + get gaxios() { + if (this.transporter instanceof gaxios_1.Gaxios) { + return this.transporter; + } + else if (this.transporter instanceof transporters_1.DefaultTransporter) { + return this.transporter.instance; + } + else if ('instance' in this.transporter && + this.transporter.instance instanceof gaxios_1.Gaxios) { + return this.transporter.instance; + } + return null; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. 
+ */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. + */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.AuthClient = AuthClient; + + +/***/ }), + +/***/ 1261: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(7647); +const baseexternalclient_1 = __nccwpck_require__(142); +const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(9157); +const util_1 = __nccwpck_require__(7870); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !awsSecurityCredentialsSupplier) { + throw new Error('A credential source or AWS security credentials supplier must be specified.'); + } + if (credentialSource && awsSecurityCredentialsSupplier) { + throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); + } + if (awsSecurityCredentialsSupplier) { + this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; + this.regionalCredVerificationUrl = + __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + this.environmentId = credentialSourceOpts.get('environment_id'); + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + const regionUrl = credentialSourceOpts.get('region_url'); + // This is only required if AWS security credentials are not available in + // environment variables. + const securityCredentialsUrl = credentialSourceOpts.get('url'); + const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); + this.awsSecurityCredentialsSupplier = + new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ + regionUrl: regionUrl, + securityCredentialsUrl: securityCredentialsUrl, + imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, + }); + this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + this.awsRequestSigner = null; + this.region = ''; + } + validateEnvironmentId() { + var _b; + const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. This will call the + * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS + * Security Credentials, then use them to create a signed AWS STS request that + * can be exchanged for a GCP access token. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. 
+ const options = await this.awsRequestSigner.getRequestOptions({ + ..._a.RETRY_CONFIG, + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } +} +exports.AwsClient = AwsClient; +_a = AwsClient; +_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; + + +/***/ }), + +/***/ 7647: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(8851); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. 
+ */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. 
+ * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? 
undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} + + +/***/ }), + +/***/ 142: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _BaseExternalAccountClient_instances, _BaseExternalAccountClient_pendingAccessToken, _BaseExternalAccountClient_internalRefreshAccessTokenAsync; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +const util_1 = __nccwpck_require__(7870); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. 
+ * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(4810); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + _BaseExternalAccountClient_instances.add(this); + /** + * A pending access token request. Used for concurrent calls. + */ + _BaseExternalAccountClient_pendingAccessToken.set(this, null); + const opts = (0, util_1.originalOrCamelOptions)(options); + const type = opts.get('type'); + if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? 
_a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + this.supplierContext = { + audience: this.audience, + subjectTokenType: this.subjectTokenType, + transporter: this.transporter, + }; + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The format looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?<email>[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. 
+ if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer <access_token_value>' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + ...BaseExternalAccountClient.RETRY_CONFIG, + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. 
+ // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Use an existing access token request, or cache a new one + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f") || __classPrivateFieldGet(this, _BaseExternalAccountClient_instances, "m", _BaseExternalAccountClient_internalRefreshAccessTokenAsync).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f"); + } + finally { + // clear pending access token for future requests + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, null, "f"); + } + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + ...BaseExternalAccountClient.RETRY_CONFIG, + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. 
+ * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; +_BaseExternalAccountClient_pendingAccessToken = new WeakMap(), _BaseExternalAccountClient_instances = new WeakSet(), _BaseExternalAccountClient_internalRefreshAccessTokenAsync = async function _BaseExternalAccountClient_internalRefreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
+ return this.cachedAccessToken; +}; + + +/***/ }), + +/***/ 977: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const gcpMetadata = __nccwpck_require__(3046); +const oauth2client_1 = __nccwpck_require__(91); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. 
This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; + + +/***/ }), + +/***/ 9157: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultAwsSecurityCredentialsSupplier = void 0; +/** + * Internal AWS security credentials supplier implementation used by {@link AwsClient} + * when a credential source is provided instead of a user defined supplier. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + */ +class DefaultAwsSecurityCredentialsSupplier { + /** + * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information + * from the credential_source stored in the ADC file. + * @param opts The default aws security credentials supplier options object to + * build the supplier with. 
+ */ + constructor(opts) { + _DefaultAwsSecurityCredentialsSupplier_instances.add(this); + this.regionUrl = opts.regionUrl; + this.securityCredentialsUrl = opts.securityCredentialsUrl; + this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Returns the active AWS region. This first checks to see if the region + * is available as an environment variable. If it is not, then the supplier + * will call the region URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS region string. + */ + async getAwsRegion(context) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + } + const metadataHeaders = {}; + if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: metadataHeaders, + }; + const response = await context.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * Returns AWS security credentials. This first checks to see if the credentials + * is available as environment variables. If it is not, then the supplier + * will call the security credentials URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS security credentials. + */ + async getAwsSecurityCredentials(context) { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + } + const metadataHeaders = {}; + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + // Since the role on a VM can change, we don't need to cache it. 
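+ // (For reference, a hedged note rather than a guarantee: with the default AWS
+ // credential source this role-name lookup and the follow-up credential fetch
+ // typically target the EC2 instance metadata service, e.g.
+ //   http://169.254.169.254/latest/meta-data/iam/security-credentials/
+ //   http://169.254.169.254/latest/meta-data/iam/security-credentials/<role_name>
+ // but the exact URLs are whatever the credential_source configuration supplies.)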
+ const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + } +} +exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; +_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = +/** + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the IMDSv2 Session Token. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = +/** + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = +/** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ +async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { + const response = await transporter.request({ + ...this.additionalGaxiosOptions, + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. 
+ return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); +}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; +}; + + +/***/ }), + +/***/ 7556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. 
+ * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. 
+ * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? 
new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; + + +/***/ }), + +/***/ 963: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCPEnv = void 0; +exports.clear = clear; +exports.getEnv = getEnv; +const gcpMetadata = __nccwpck_require__(3046); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. 
+ * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. + */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} + + +/***/ }), + +/***/ 3247: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. 
+ if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; + + +/***/ }), + +/***/ 4240: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(4810); +const oauth2common_1 = __nccwpck_require__(6653); +const gaxios_1 = __nccwpck_require__(7003); +const stream = __nccwpck_require__(2203); +const baseexternalclient_1 = __nccwpck_require__(142); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; + + +/***/ }), + +/***/ 8323: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
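+// Illustrative usage of the ExternalAccountClient factory defined below: a
+// minimal sketch assuming the public `google-auth-library` entry point, with a
+// hypothetical credential file path.
+//
+//   const {ExternalAccountClient} = require('google-auth-library');
+//   async function main() {
+//     const json = require('/path/to/external-account.json'); // hypothetical path
+//     const client = ExternalAccountClient.fromJSON(json); // AwsClient, PluggableAuthClient, or IdentityPoolClient
+//     if (!client) throw new Error('Not an external_account credential.');
+//     client.scopes = ['https://www.googleapis.com/auth/cloud-platform'];
+//     const headers = await client.getRequestHeaders(); // { Authorization: 'Bearer <token>' }
+//   }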
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const identitypoolclient_1 = __nccwpck_require__(9960); +const awsclient_1 = __nccwpck_require__(1261); +const pluggable_auth_client_1 = __nccwpck_require__(6077); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } + } +} +exports.ExternalAccountClient = ExternalAccountClient; + + +/***/ }), + +/***/ 932: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var _a, _b, _c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileSubjectTokenSupplier = void 0; +const util_1 = __nccwpck_require__(9023); +const fs = __nccwpck_require__(9896); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? 
_b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Internal subject token supplier implementation used when a file location + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class FileSubjectTokenSupplier { + /** + * Instantiates a new file based subject token supplier. + * @param opts The file subject token supplier options to build the supplier + * with. + */ + constructor(opts) { + this.filePath = opts.filePath; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + } + /** + * Returns the subject token stored at the file specified in the constructor. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + let parsedFilePath = this.filePath; + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + parsedFilePath = await realpath(parsedFilePath); + if (!(await lstat(parsedFilePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); + if (this.formatType === 'text') { + subjectToken = rawText; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } +} +exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; + + +/***/ }), + +/***/ 5934: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleAuth_instances, _GoogleAuth_pendingAuthClient, _GoogleAuth_prepareAndCacheClient, _GoogleAuth_determineClient; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.GoogleAuthExceptionMessages = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +const gcpMetadata = __nccwpck_require__(3046); +const os = __nccwpck_require__(857); +const path = __nccwpck_require__(6928); +const crypto_1 = __nccwpck_require__(8851); +const transporters_1 = __nccwpck_require__(7633); +const computeclient_1 = __nccwpck_require__(977); +const idtokenclient_1 = __nccwpck_require__(2718); +const envDetect_1 = __nccwpck_require__(963); +const jwtclient_1 = __nccwpck_require__(5277); +const refreshclient_1 = __nccwpck_require__(9807); +const impersonated_1 = __nccwpck_require__(9964); +const externalclient_1 = __nccwpck_require__(8323); +const baseexternalclient_1 = __nccwpck_require__(142); +const authclient_1 = __nccwpck_require__(4810); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(4240); +const util_1 = __nccwpck_require__(7870); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +exports.GoogleAuthExceptionMessages = { + API_KEY_WITH_CREDENTIALS: 'API Keys and Credentials are mutually exclusive authentication methods and cannot be used together.', + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_ADC_FOUND: 'Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisfy unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. 
+ * + * @param opts + */ + constructor(opts = {}) { + _GoogleAuth_instances.add(this); + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + /** + * A pending {@link AuthClient}. Used for concurrent {@link GoogleAuth.getClient} calls. + */ + _GoogleAuth_pendingAuthClient.set(this, null); + this.clientOptions = {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.clientOptions = opts.clientOptions || {}; + this.jsonContent = opts.credentials || null; + this.apiKey = opts.apiKey || this.clientOptions.apiKey || null; + // Cannot use both API Key + Credentials + if (this.apiKey && (this.jsonContent || this.clientOptions.credentials)) { + throw new RangeError(exports.GoogleAuthExceptionMessages.API_KEY_WITH_CREDENTIALS); + } + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } + } + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /** + * A private method for finding and caching a projectId. 
+ * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } + } + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe-domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. 
+ if (this.cachedCredential) { + // cache, while preserving existing quota project preferences + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, this.cachedCredential, null); + } + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + options.scopes = this.getAnyScopes(); + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, new computeclient_1.Compute(options)); + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_ADC_FOUND); + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. 
+ if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; + } + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); + } + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + const sourceClient = this.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extract service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?<target>[^/]+):(generateAccessToken|generateIdToken)$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + return new impersonated_1.Impersonated({ + ...json, + sourceClient, + targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); + } + /** + * Create a credentials instance using the given input options. + * This client is not cached. 
+ * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + * + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. 
+ this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + const chunks = []; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => chunks.push(chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(chunks.join('')); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * The created client is not cached. In order to create and cache it use the {@link GoogleAuth.getClient `getClient`} method after first providing an {@link GoogleAuth.apiKey `apiKey`}. + * + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options = {}) { + return new jwtclient_1.JWT({ ...options, apiKey }); + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; + } + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); + } + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. 
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } + } + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); + } + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; + } + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. 
+ */ + async getClient() { + if (this.cachedCredential) { + return this.cachedCredential; + } + // Use an existing auth client request, or cache a new one + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f") || __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_determineClient).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f"); + } + finally { + // reset the pending auth client in case it is changed later + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, null, "f"); + } + } + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); + } + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; + } + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); + } + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; + } + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; + } + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + retry: true, + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + return res.data.signedBlob; + } +} +exports.GoogleAuth = GoogleAuth; +_GoogleAuth_pendingAuthClient = new WeakMap(), _GoogleAuth_instances = new WeakSet(), _GoogleAuth_prepareAndCacheClient = async function _GoogleAuth_prepareAndCacheClient(credential, quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'] || null) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; +}, _GoogleAuth_determineClient = async function _GoogleAuth_determineClient() { + if (this.jsonContent) { + return this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + return await this.fromStreamAsync(stream, this.clientOptions); + } + else if (this.apiKey) { + const client = await this.fromAPIKey(this.apiKey, this.clientOptions); + client.scopes = this.scopes; + const { credential } = await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, client); + return credential; + } + else { + const { credential } = await this.getApplicationDefaultAsync(this.clientOptions); + return credential; + } +}; +/** + * Export DefaultTransporter as a static property of the class. + */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; + + +/***/ }), + +/***/ 7009: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. 
+ * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } +} +exports.IAMAuth = IAMAuth; + + +/***/ }), + +/***/ 9960: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdentityPoolClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const util_1 = __nccwpck_require__(7870); +const filesubjecttokensupplier_1 = __nccwpck_require__(932); +const urlsubjecttokensupplier_1 = __nccwpck_require__(4627); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const subjectTokenSupplier = opts.get('subject_token_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !subjectTokenSupplier) { + throw new Error('A credential source or subject token supplier must be specified.'); + } + if (credentialSource && subjectTokenSupplier) { + throw new Error('Only one of credential source or subject token supplier can be specified.'); + } + if (subjectTokenSupplier) { + this.subjectTokenSupplier = subjectTokenSupplier; + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. 
+ const formatType = formatOpts.get('type') || 'text'; + const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (formatType !== 'json' && formatType !== 'text') { + throw new Error(`Invalid credential_source format "${formatType}"`); + } + if (formatType === 'json' && !formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + const file = credentialSourceOpts.get('file'); + const url = credentialSourceOpts.get('url'); + const headers = credentialSourceOpts.get('headers'); + if (file && url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + else if (file && !url) { + this.credentialSourceType = 'file'; + this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ + filePath: file, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + }); + } + else if (!file && url) { + this.credentialSourceType = 'url'; + this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ + url: url, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + headers: headers, + additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, + }); + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + } + } + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. Gets a subject token by calling + * the configured {@link SubjectTokenSupplier} + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); + } +} +exports.IdentityPoolClient = IdentityPoolClient; + + +/***/ }), + +/***/ 2718: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(91); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } + } +} +exports.IdTokenClient = IdTokenClient; + + +/***/ }), + +/***/ 9964: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const gaxios_1 = __nccwpck_require__(7003); +const util_1 = __nccwpck_require__(7870); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. 
+ * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + const usingExplicitUniverseDomain = !!(0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (!usingExplicitUniverseDomain) { + // override the default universe with the source's universe + this.universeDomain = this.sourceClient.universeDomain; + } + else if (this.sourceClient.universeDomain !== this.universeDomain) { + // non-default universe and is not matching the source - this could be a credential leak + throw new RangeError(`Universe domain ${this.sourceClient.universeDomain} in source credentials does not match ${this.universeDomain} universe domain set for impersonated credentials.`); + } + this.endpoint = + (_f = options.endpoint) !== null && _f !== void 0 ? _f : `https://iamcredentials.${this.universeDomain}`; + } + /** + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * + * @returns A {@link SignBlobResponse} denoting the keyID and signedBlob in base64 string + */ + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. 
+ */ + async refreshToken() { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; + } + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a, _b; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + useEmailAzp: (_b = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _b !== void 0 ? _b : true, + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } +} +exports.Impersonated = Impersonated; + + +/***/ }), + +/***/ 7060: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(3324); +const util_1 = __nccwpck_require__(7870); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? 
{ ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; + + +/***/ }), + +/***/ 5277: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(8568); +const jwtaccess_1 = __nccwpck_require__(7060); +const oauth2client_1 = __nccwpck_require__(91); +const authclient_1 = __nccwpck_require__(4810); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? 
optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); + } + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. 
+ */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + * + * @remarks + * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; + + +/***/ }), + +/***/ 3882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. 
+ * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; + + +/***/ }), + +/***/ 91: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuth2Client = exports.ClientAuthentication = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const stream = __nccwpck_require__(2203); +const formatEcdsa = __nccwpck_require__(325); +const crypto_1 = __nccwpck_require__(8851); +const authclient_1 = __nccwpck_require__(4810); +const loginticket_1 = __nccwpck_require__(3882); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +/** + * The client authentication type. Supported values are basic, post, and none. + * https://datatracker.ietf.org/doc/html/rfc7591#section-2 + */ +var ClientAuthentication; +(function (ClientAuthentication) { + ClientAuthentication["ClientSecretPost"] = "ClientSecretPost"; + ClientAuthentication["ClientSecretBasic"] = "ClientSecretBasic"; + ClientAuthentication["None"] = "None"; +})(ClientAuthentication || (exports.ClientAuthentication = ClientAuthentication = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? 
optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.clientAuthentication = + opts.clientAuthentication || ClientAuthentication.ClientSecretPost; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const values = { + client_id: options.client_id || this._clientId, + code_verifier: options.codeVerifier, + code: options.code, + grant_type: 'authorization_code', + redirect_uri: options.redirect_uri || this.redirectUri, + }; + if (this.clientAuthentication === ClientAuthentication.ClientSecretBasic) { + const basic = Buffer.from(`${this._clientId}:${this._clientSecret}`); + headers['Authorization'] = `Basic ${basic.toString('base64')}`; + } + if (this.clientAuthentication === ClientAuthentication.ClientSecretPost) { + values.client_secret = this._clientSecret; + } + const res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(values), + headers, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + ...OAuth2Client.RETRY_CONFIG, + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, reAuthRetried = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. 
+ // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. 
+ */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. 
+ * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if 
(!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; + + +/***/ }), + +/***/ 6653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuthClientAuthHandler = void 0; +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; +const querystring = __nccwpck_require__(3480); +const crypto_1 = __nccwpck_require__(8851); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. 
+ */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. 
+ */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} + + +/***/ }), + +/***/ 2045: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PassThroughClient = void 0; +const authclient_1 = __nccwpck_require__(4810); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + async request(opts) { + return this.transporter.request(opts); + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getAccessToken() { + return {}; + } + /** + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} + */ + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); + + +/***/ }), + +/***/ 6077: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const executable_response_1 = __nccwpck_require__(3247); +const pluggable_auth_handler_1 = __nccwpck_require__(908); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ *
+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration.
+ *
+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
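+        // Editor's note: illustrative sketch, not part of the bundled library source.
+        // A minimal Node.js executable that satisfies this protocol only needs to print
+        // the documented JSON shape to STDOUT (and, when an output_file is configured,
+        // write the same JSON there). All values below are placeholders:
+        //
+        //   #!/usr/bin/env node
+        //   process.stdout.write(JSON.stringify({
+        //     version: 1,
+        //     success: true,
+        //     token_type: 'urn:ietf:params:oauth:token-type:id_token',
+        //     id_token: process.env.MY_ID_TOKEN, // hypothetical token source
+        //     expiration_time: Math.floor(Date.now() / 1000) + 3600,
+        //   }));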
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(6077); +const executable_response_1 = __nccwpck_require__(3247); +const childProcess = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. 
+ */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. 
+ if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 9807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const querystring_1 = __nccwpck_require__(3480); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
+ */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + async fetchIdToken(targetAudience) { + const res = await this.transporter.request({ + ...UserRefreshClient.RETRY_CONFIG, + url: this.endpoints.oauth2TokenUrl, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + data: (0, querystring_1.stringify)({ + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + refresh_token: this._refreshToken, + target_audience: targetAudience, + }), + }); + return res.data.id_token; + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + static fromJSON(json) { + const client = new UserRefreshClient(); + client.fromJSON(json); + return client; + } +} +exports.UserRefreshClient = UserRefreshClient; + + +/***/ }), + +/***/ 121: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
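+// Editor's note: illustrative sketch, not part of the bundled library source.
+// The StsCredentials class below POSTs an RFC 8693 token exchange request; its
+// form-encoded body is assembled from fields like these (placeholder values):
+//
+//   grant_type=urn:ietf:params:oauth:grant-type:token-exchange
+//   audience=//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/my-pool/providers/my-provider
+//   requested_token_type=urn:ietf:params:oauth:token-type:access_token
+//   subject_token=<external credential>
+//   subject_token_type=urn:ietf:params:oauth:token-type:jwt
+//   scope=https://www.googleapis.com/auth/cloud-platform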
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const transporters_1 = __nccwpck_require__(7633); +const oauth2common_1 = __nccwpck_require__(6653); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + ...StsCredentials.RETRY_CONFIG, + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. 
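+            // Editor's note: illustrative sketch, not part of the bundled library source.
+            // getErrorFromOAuthErrorResponse (module 6653 above) expects the standard
+            // RFC 6749 OAuth error body, e.g. (placeholder values):
+            //
+            //   {
+            //     "error": "invalid_grant",
+            //     "error_description": "Subject token is invalid.",
+            //     "error_uri": "https://example.com/docs/errors"
+            //   }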
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; + + +/***/ }), + +/***/ 4627: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UrlSubjectTokenSupplier = void 0; +/** + * Internal subject token supplier implementation used when a URL + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class UrlSubjectTokenSupplier { + /** + * Instantiates a URL subject token supplier. + * @param opts The URL subject token supplier options to build the supplier with. + */ + constructor(opts) { + this.url = opts.url; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + this.headers = opts.headers; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Sends a GET request to the URL provided in the constructor and resolves + * with the returned external subject token. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.url, + method: 'GET', + headers: this.headers, + responseType: this.formatType, + }; + let subjectToken; + if (this.formatType === 'text') { + const response = await context.transporter.request(opts); + subjectToken = response.data; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const response = await context.transporter.request(opts); + subjectToken = response.data[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; + + +/***/ }), + +/***/ 3438: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
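+// Usage sketch (illustrative only). The StsCredentials class above sends the
+// RFC 8693 token-exchange request as a form-encoded POST. The endpoint URL and
+// token values here are assumptions/placeholders; the grant-type and
+// token-type URNs are the standard RFC 8693 identifiers:
+//
+//   const sts = new StsCredentials('https://sts.googleapis.com/v1/token');
+//   const stsResponse = await sts.exchangeToken({
+//     grantType: 'urn:ietf:params:oauth:grant-type:token-exchange',
+//     audience: 'PLACEHOLDER_AUDIENCE',
+//     scope: ['https://www.googleapis.com/auth/cloud-platform'],   // joined with ' '
+//     requestedTokenType: 'urn:ietf:params:oauth:token-type:access_token',
+//     subjectToken: 'PLACEHOLDER_EXTERNAL_TOKEN',
+//     subjectTokenType: 'urn:ietf:params:oauth:token-type:jwt',
+//   });
+//   // stsResponse carries the parsed STS payload (access_token, expires_in, ...)
+//   // with the raw transport response attached as stsResponse.res.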
+/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(8793); +const crypto_1 = __nccwpck_require__(8851); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. 
+ const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; + + +/***/ }), + +/***/ 8851: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCrypto = createCrypto; +exports.hasBrowserCrypto = hasBrowserCrypto; +exports.fromArrayBufferToHex = fromArrayBufferToHex; +const crypto_1 = __nccwpck_require__(3438); +const crypto_2 = __nccwpck_require__(7388); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} + + +/***/ }), + +/***/ 7388: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
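+// Usage sketch (illustrative only). createCrypto() above picks the
+// SubtleCrypto-backed BrowserCrypto when window.crypto.subtle is available and
+// otherwise the NodeCrypto implementation defined just below; both expose the
+// same interface. Input strings here are arbitrary examples:
+//
+//   const cryptoImpl = createCrypto();
+//   const hexDigest = await cryptoImpl.sha256DigestHex('example');   // 64 hex chars
+//   const b64Digest = await cryptoImpl.sha256DigestBase64('example');
+//   const nonce = cryptoImpl.randomBytesBase64(32);
+//   // fromArrayBufferToHex() is the hex helper BrowserCrypto uses above:
+//   fromArrayBufferToHex(new Uint8Array([0x0a, 0xff]).buffer);       // => '0aff'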
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6982); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + + +/***/ }), + +/***/ 492: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.ExecutableError = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsRequestSigner = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.ClientAuthentication = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(5934); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +// Export common deps to ensure types/instances are the exact match. Useful +// for consistently configuring the library across versions. +exports.gcpMetadata = __nccwpck_require__(3046); +exports.gaxios = __nccwpck_require__(7003); +var authclient_1 = __nccwpck_require__(4810); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(977); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(963); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(7009); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(2718); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(7060); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(5277); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(9964); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(91); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +Object.defineProperty(exports, "ClientAuthentication", ({ enumerable: true, get: function () { return oauth2client_1.ClientAuthentication; } })); +var loginticket_1 = __nccwpck_require__(3882); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(9807); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(1261); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var awsrequestsigner_1 = __nccwpck_require__(7647); +Object.defineProperty(exports, "AwsRequestSigner", ({ enumerable: true, get: function () { return awsrequestsigner_1.AwsRequestSigner; } })); +var identitypoolclient_1 = 
__nccwpck_require__(9960); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(8323); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(142); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(7556); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(6077); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +Object.defineProperty(exports, "ExecutableError", ({ enumerable: true, get: function () { return pluggable_auth_client_1.ExecutableError; } })); +var passthrough_1 = __nccwpck_require__(2045); +Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); +var transporters_1 = __nccwpck_require__(7633); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 8290: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validate = validate; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} + + +/***/ }), + +/***/ 7633: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const options_1 = __nccwpck_require__(8290); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = + `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } + } + return opts; + } + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + + +/***/ }), + +/***/ 7870: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LRUCache = void 0; +exports.snakeToCamel = snakeToCamel; +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; + } + return { get }; +} +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; + } + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. 
+ * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; + } +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; + + +/***/ }), + +/***/ 1628: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Colours = void 0; +/** + * Handles figuring out if we can use ANSI colours and handing out the escape codes. + * + * This is for package-internal use only, and may change at any time. + * + * @private + * @internal + */ +class Colours { + /** + * @param stream The stream (e.g. process.stderr) + * @returns true if the stream should have colourization enabled + */ + static isEnabled(stream) { + return (stream.isTTY && + (typeof stream.getColorDepth === 'function' + ? 
stream.getColorDepth() > 2 + : true)); + } + static refresh() { + Colours.enabled = Colours.isEnabled(process.stderr); + if (!this.enabled) { + Colours.reset = ''; + Colours.bright = ''; + Colours.dim = ''; + Colours.red = ''; + Colours.green = ''; + Colours.yellow = ''; + Colours.blue = ''; + Colours.magenta = ''; + Colours.cyan = ''; + Colours.white = ''; + Colours.grey = ''; + } + else { + Colours.reset = '\u001b[0m'; + Colours.bright = '\u001b[1m'; + Colours.dim = '\u001b[2m'; + Colours.red = '\u001b[31m'; + Colours.green = '\u001b[32m'; + Colours.yellow = '\u001b[33m'; + Colours.blue = '\u001b[34m'; + Colours.magenta = '\u001b[35m'; + Colours.cyan = '\u001b[36m'; + Colours.white = '\u001b[37m'; + Colours.grey = '\u001b[90m'; + } + } +} +exports.Colours = Colours; +Colours.enabled = false; +Colours.reset = ''; +Colours.bright = ''; +Colours.dim = ''; +Colours.red = ''; +Colours.green = ''; +Colours.yellow = ''; +Colours.blue = ''; +Colours.magenta = ''; +Colours.cyan = ''; +Colours.white = ''; +Colours.grey = ''; +Colours.refresh(); +//# sourceMappingURL=colours.js.map + +/***/ }), + +/***/ 1577: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(4788), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4788: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021-2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.env = exports.DebugLogBackendBase = exports.placeholder = exports.AdhocDebugLogger = exports.LogSeverity = void 0; +exports.getNodeBackend = getNodeBackend; +exports.getDebugBackend = getDebugBackend; +exports.getStructuredBackend = getStructuredBackend; +exports.setBackend = setBackend; +exports.log = log; +const node_events_1 = __nccwpck_require__(8474); +const process = __importStar(__nccwpck_require__(1708)); +const util = __importStar(__nccwpck_require__(7975)); +const colours_1 = __nccwpck_require__(1628); +// Some functions (as noted) are based on the Node standard library, from +// the following file: +// +// https://github.com/nodejs/node/blob/main/lib/internal/util/debuglog.js +/** + * This module defines an ad-hoc debug logger for Google Cloud Platform + * client libraries in Node. An ad-hoc debug logger is a tool which lets + * users use an external, unified interface (in this case, environment + * variables) to determine what logging they want to see at runtime. This + * isn't necessarily fed into the console, but is meant to be under the + * control of the user. The kind of logging that will be produced by this + * is more like "call retry happened", not "event you'd want to record + * in Cloud Logger". + * + * More for Googlers implementing libraries with it: + * go/cloud-client-logging-design + */ +/** + * Possible log levels. These are a subset of Cloud Observability levels. + * https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity + */ +var LogSeverity; +(function (LogSeverity) { + LogSeverity["DEFAULT"] = "DEFAULT"; + LogSeverity["DEBUG"] = "DEBUG"; + LogSeverity["INFO"] = "INFO"; + LogSeverity["WARNING"] = "WARNING"; + LogSeverity["ERROR"] = "ERROR"; +})(LogSeverity || (exports.LogSeverity = LogSeverity = {})); +/** + * Our logger instance. This actually contains the meat of dealing + * with log lines, including EventEmitter. This contains the function + * that will be passed back to users of the package. + */ +class AdhocDebugLogger extends node_events_1.EventEmitter { + /** + * @param upstream The backend will pass a function that will be + * called whenever our logger function is invoked. + */ + constructor(namespace, upstream) { + super(); + this.namespace = namespace; + this.upstream = upstream; + this.func = Object.assign(this.invoke.bind(this), { + // Also add an instance pointer back to us. + instance: this, + // And pull over the EventEmitter functionality. 
+ on: (event, listener) => this.on(event, listener), + }); + // Convenience methods for log levels. + this.func.debug = (...args) => this.invokeSeverity(LogSeverity.DEBUG, ...args); + this.func.info = (...args) => this.invokeSeverity(LogSeverity.INFO, ...args); + this.func.warn = (...args) => this.invokeSeverity(LogSeverity.WARNING, ...args); + this.func.error = (...args) => this.invokeSeverity(LogSeverity.ERROR, ...args); + this.func.sublog = (namespace) => log(namespace, this.func); + } + invoke(fields, ...args) { + // Push out any upstream logger first. + if (this.upstream) { + this.upstream(fields, ...args); + } + // Emit sink events. + this.emit('log', fields, args); + } + invokeSeverity(severity, ...args) { + this.invoke({ severity }, ...args); + } +} +exports.AdhocDebugLogger = AdhocDebugLogger; +/** + * This can be used in place of a real logger while waiting for Promises or disabling logging. + */ +exports.placeholder = new AdhocDebugLogger('', () => { }).func; +/** + * The base class for debug logging backends. It's possible to use this, but the + * same non-guarantees above still apply (unstable interface, etc). + * + * @private + * @internal + */ +class DebugLogBackendBase { + constructor() { + var _a; + this.cached = new Map(); + this.filters = []; + this.filtersSet = false; + // Look for the Node config variable for what systems to enable. We'll store + // these for the log method below, which will call setFilters() once. + let nodeFlag = (_a = process.env[exports.env.nodeEnables]) !== null && _a !== void 0 ? _a : '*'; + if (nodeFlag === 'all') { + nodeFlag = '*'; + } + this.filters = nodeFlag.split(','); + } + log(namespace, fields, ...args) { + try { + if (!this.filtersSet) { + this.setFilters(); + this.filtersSet = true; + } + let logger = this.cached.get(namespace); + if (!logger) { + logger = this.makeLogger(namespace); + this.cached.set(namespace, logger); + } + logger(fields, ...args); + } + catch (e) { + // Silently ignore all errors; we don't want them to interfere with + // the user's running app. + // e; + console.error(e); + } + } +} +exports.DebugLogBackendBase = DebugLogBackendBase; +// The basic backend. This one definitely works, but it's less feature-filled. +// +// Rather than using util.debuglog, this implements the same basic logic directly. +// The reason for this decision is that debuglog checks the value of the +// NODE_DEBUG environment variable before any user code runs; we therefore +// can't pipe our own enables into it (and util.debuglog will never print unless +// the user duplicates it into NODE_DEBUG, which isn't reasonable). +// +class NodeBackend extends DebugLogBackendBase { + constructor() { + super(...arguments); + // Default to allowing all systems, since we gate earlier based on whether the + // variable is empty. + this.enabledRegexp = /.*/g; + } + isEnabled(namespace) { + return this.enabledRegexp.test(namespace); + } + makeLogger(namespace) { + if (!this.enabledRegexp.test(namespace)) { + return () => { }; + } + return (fields, ...args) => { + var _a; + // TODO: `fields` needs to be turned into a string here, one way or another. 
+ const nscolour = `${colours_1.Colours.green}${namespace}${colours_1.Colours.reset}`; + const pid = `${colours_1.Colours.yellow}${process.pid}${colours_1.Colours.reset}`; + let level; + switch (fields.severity) { + case LogSeverity.ERROR: + level = `${colours_1.Colours.red}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.INFO: + level = `${colours_1.Colours.magenta}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.WARNING: + level = `${colours_1.Colours.yellow}${fields.severity}${colours_1.Colours.reset}`; + break; + default: + level = (_a = fields.severity) !== null && _a !== void 0 ? _a : LogSeverity.DEFAULT; + break; + } + const msg = util.formatWithOptions({ colors: colours_1.Colours.enabled }, ...args); + const filteredFields = Object.assign({}, fields); + delete filteredFields.severity; + const fieldsJson = Object.getOwnPropertyNames(filteredFields).length + ? JSON.stringify(filteredFields) + : ''; + const fieldsColour = fieldsJson + ? `${colours_1.Colours.grey}${fieldsJson}${colours_1.Colours.reset}` + : ''; + console.error('%s [%s|%s] %s%s', pid, nscolour, level, msg, fieldsJson ? ` ${fieldsColour}` : ''); + }; + } + // Regexp patterns below are from here: + // https://github.com/nodejs/node/blob/c0aebed4b3395bd65d54b18d1fd00f071002ac20/lib/internal/util/debuglog.js#L36 + setFilters() { + const totalFilters = this.filters.join(','); + const regexp = totalFilters + .replace(/[|\\{}()[\]^$+?.]/g, '\\$&') + .replace(/\*/g, '.*') + .replace(/,/g, '$|^'); + this.enabledRegexp = new RegExp(`^${regexp}$`, 'i'); + } +} +/** + * @returns A backend based on Node util.debuglog; this is the default. + */ +function getNodeBackend() { + return new NodeBackend(); +} +class DebugBackend extends DebugLogBackendBase { + constructor(pkg) { + super(); + this.debugPkg = pkg; + } + makeLogger(namespace) { + const debugLogger = this.debugPkg(namespace); + return (fields, ...args) => { + // TODO: `fields` needs to be turned into a string here. + debugLogger(args[0], ...args.slice(1)); + }; + } + setFilters() { + var _a; + const existingFilters = (_a = process.env['NODE_DEBUG']) !== null && _a !== void 0 ? _a : ''; + process.env['NODE_DEBUG'] = `${existingFilters}${existingFilters ? ',' : ''}${this.filters.join(',')}`; + } +} +/** + * Creates a "debug" package backend. The user must call require('debug') and pass + * the resulting object to this function. + * + * ``` + * setBackend(getDebugBackend(require('debug'))) + * ``` + * + * https://www.npmjs.com/package/debug + * + * Note: Google does not explicitly endorse or recommend this package; it's just + * being provided as an option. + * + * @returns A backend based on the npm "debug" package. + */ +function getDebugBackend(debugPkg) { + return new DebugBackend(debugPkg); +} +/** + * This pretty much works like the Node logger, but it outputs structured + * logging JSON matching Google Cloud's ingestion specs. Rather than handling + * its own output, it wraps another backend. The passed backend must be a subclass + * of `DebugLogBackendBase` (any of the backends exposed by this package will work). + */ +class StructuredBackend extends DebugLogBackendBase { + constructor(upstream) { + var _a; + super(); + this.upstream = (_a = upstream) !== null && _a !== void 0 ? _a : new NodeBackend(); + } + makeLogger(namespace) { + const debugLogger = this.upstream.makeLogger(namespace); + return (fields, ...args) => { + var _a; + const severity = (_a = fields.severity) !== null && _a !== void 0 ? 
_a : LogSeverity.INFO; + const json = Object.assign({ + severity, + message: util.format(...args), + }, fields); + const jsonString = JSON.stringify(json); + debugLogger(fields, jsonString); + }; + } + setFilters() { + this.upstream.setFilters(); + } +} +/** + * Creates a "structured logging" backend. This pretty much works like the + * Node logger, but it outputs structured logging JSON matching Google + * Cloud's ingestion specs instead of plain text. + * + * ``` + * setBackend(getStructuredBackend()) + * ``` + * + * @param upstream If you want to use something besides the Node backend to + * write the actual log lines into, pass that here. + * @returns A backend based on Google Cloud structured logging. + */ +function getStructuredBackend(upstream) { + return new StructuredBackend(upstream); +} +/** + * The environment variables that we standardized on, for all ad-hoc logging. + */ +exports.env = { + /** + * Filter wildcards specific to the Node syntax, and similar to the built-in + * utils.debuglog() environment variable. If missing, disables logging. + */ + nodeEnables: 'GOOGLE_SDK_NODE_LOGGING', +}; +// Keep a copy of all namespaced loggers so users can reliably .on() them. +// Note that these cached functions will need to deal with changes in the backend. +const loggerCache = new Map(); +// Our current global backend. This might be: +let cachedBackend = undefined; +/** + * Set the backend to use for our log output. + * - A backend object + * - null to disable logging + * - undefined for "nothing yet", defaults to the Node backend + * + * @param backend Results from one of the get*Backend() functions. + */ +function setBackend(backend) { + cachedBackend = backend; + loggerCache.clear(); +} +/** + * Creates a logging function. Multiple calls to this with the same namespace + * will produce the same logger, with the same event emitter hooks. + * + * Namespaces can be a simple string ("system" name), or a qualified string + * (system:subsystem), which can be used for filtering, or for "system:*". + * + * @param namespace The namespace, a descriptive text string. + * @returns A function you can call that works similar to console.log(). + */ +function log(namespace, parent) { + // If the enable flag isn't set, do nothing. + const enablesFlag = process.env[exports.env.nodeEnables]; + if (!enablesFlag) { + return exports.placeholder; + } + // This might happen mostly if the typings are dropped in a user's code, + // or if they're calling from JavaScript. + if (!namespace) { + return exports.placeholder; + } + // Handle sub-loggers. + if (parent) { + namespace = `${parent.instance.namespace}:${namespace}`; + } + // Reuse loggers so things like event sinks are persistent. + const existing = loggerCache.get(namespace); + if (existing) { + return existing.func; + } + // Do we have a backend yet? + if (cachedBackend === null) { + // Explicitly disabled. + return exports.placeholder; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. + cachedBackend = getNodeBackend(); + } + // The logger is further wrapped so we can handle the backend changing out. + const logger = (() => { + let previousBackend = undefined; + const newLogger = new AdhocDebugLogger(namespace, (fields, ...args) => { + if (previousBackend !== cachedBackend) { + // Did the user pass a custom backend? + if (cachedBackend === null) { + // Explicitly disabled. + return; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. 
+ cachedBackend = getNodeBackend(); + } + previousBackend = cachedBackend; + } + cachedBackend === null || cachedBackend === void 0 ? void 0 : cachedBackend.log(namespace, fields, ...args); + }); + return newLogger; + })(); + loggerCache.set(namespace, logger); + return logger.func; +} +//# sourceMappingURL=logging-utils.js.map + +/***/ }), + +/***/ 8568: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(9896); +const gaxios_1 = __nccwpck_require__(7003); +const jws = __nccwpck_require__(3324); +const path = __nccwpck_require__(6928); +const util_1 = __nccwpck_require__(9023); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. 
+ * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. 
+ */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 3813: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; + + +/***/ }), + +/***/ 7847: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const once_1 = __importDefault(__nccwpck_require__(8662)); +const agent_base_1 = __nccwpck_require__(2960); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
+ * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. 
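// A minimal sketch of the token-request flow implemented by #requestToken
// above: a one-hour RS256 JWT is signed with the service-account key and
// exchanged via the urn:ietf:params:oauth:grant-type:jwt-bearer grant. It is
// built here with the jws API also bundled in this file; `email`,
// `privateKey`, `scope` and `tokenUrl` are placeholder inputs (tokenUrl
// stands for the module's GOOGLE_TOKEN_URL constant).
const jws = require('jws');

function buildBearerAssertion(email, privateKey, scope, tokenUrl) {
  const iat = Math.floor(Date.now() / 1000);
  return jws.sign({
    header: { alg: 'RS256' },      // same algorithm used by #requestToken
    payload: {
      iss: email,                  // service-account client email
      scope,                       // space-delimited scope string
      aud: tokenUrl,               // audience: the OAuth token endpoint
      iat,
      exp: iat + 3600,             // one hour, matching `iat + 3600` above
    },
    secret: privateKey,            // PEM-encoded private key
  });
}

// The assertion is then form-posted as
//   grant_type=urn:ietf:params:oauth:grant-type:jwt-bearer&assertion=<JWT>
// and the expires_in field of the JSON response drives `expiresAt` above.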
+ if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 1970: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(7847)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2960: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(2600)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
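// A sketch of the contract that addRequest() enforces in this module: the
// user-supplied callback (or an overridden callback() on a subclass) must
// resolve to a Duplex stream, or to another Agent to delegate to. The package
// name and target URL below are illustrative assumptions.
const net = require('net');
const http = require('http');
const createAgent = require('agent-base');

// A trivial agent: hand every request a plain TCP socket. Anything that is
// not a Duplex stream (or an Agent) makes addRequest() fail with
// "no Duplex stream was returned to agent-base".
const agent = createAgent((req, opts) => {
  return net.connect({ host: opts.host, port: opts.port });
});

http.get('http://example.com/', { agent }, (res) => {
  console.log('status:', res.statusCode);
});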
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2600: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 3669: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const tls = __importStar(__nccwpck_require__(4756)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(8894); +const url_1 = __nccwpck_require__(7016); +const parse_proxy_response_1 = __nccwpck_require__(7943); +const debug = (0, debug_1.default)('https-proxy-agent'); +const setServernameFromNonIpHost = (options) => { + if (options.servername === undefined && + options.host && + !net.isIP(options.host)) { + return { + ...options, + servername: options.host, + }; + } + return options; +}; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? 
`[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + return tls.connect({ + ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'), + socket, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7943: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
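// A usage sketch for the HttpsProxyAgent class above, assuming this bundle
// matches the published https-proxy-agent package; the proxy URL and target
// host are placeholders. Credentials embedded in the proxy URL become the
// Proxy-Authorization header, exactly as in connect() above.
const https = require('https');
const { HttpsProxyAgent } = require('https-proxy-agent');

const agent = new HttpsProxyAgent('http://user:pass@proxy.example.internal:3128');

https.get('https://example.com/', { agent }, (res) => {
  console.log('status:', res.statusCode);
});

// On the wire the agent first writes something like
//   CONNECT example.com:443 HTTP/1.1
//   Host: example.com:443
//   Proxy-Authorization: Basic dXNlcjpwYXNz
//   Proxy-Connection: close
// and only upgrades the socket to TLS after the proxy answers 200; any other
// status is replayed onto a fake socket so the caller sees the error response.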
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 9598: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(9023); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(6589); +} + + +/***/ }), + +/***/ 6589: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), + +/***/ 6543: +/***/ ((module) => { + +"use strict"; + + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + 
typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; + + +/***/ }), + +/***/ 4826: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var json_stringify = (__nccwpck_require__(3651).stringify); +var json_parse = __nccwpck_require__(3197); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; + + +/***/ }), + +/***/ 3197: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. 
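// A short usage sketch for the parser configured below, assuming this bundle
// matches the published json-bigint package. Integers longer than 15 digits
// are kept exact (here as native BigInt) instead of being rounded to the
// nearest double by Number.
const JSONbig = require('json-bigint')({ useNativeBigInt: true });

const text = '{"id": 9123372036854775807}';

console.log(JSON.parse(text).id);     // rounded: precision is lost
console.log(JSONbig.parse(text).id);  // 9123372036854775807n (exact BigInt)

// stringify() writes the BigInt back out as a bare number literal:
console.log(JSONbig.stringify(JSONbig.parse(text))); // {"id":9123372036854775807}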
+*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } + } + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. 
+ + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(1259); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. 
+ + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; + + +/***/ }), + +/***/ 3651: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = __nccwpck_require__(1259); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. 
If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. 
+ + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. 
The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. 
+ + return str('', {'': value}); + }; + } +}()); + + +/***/ }), + +/***/ 8622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var bufferEqual = __nccwpck_require__(9732); +var Buffer = (__nccwpck_require__(3058).Buffer); +var crypto = __nccwpck_require__(6982); +var formatEcdsa = __nccwpck_require__(325); +var util = __nccwpck_require__(9023); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function 
createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; + + +/***/ }), + +/***/ 3324: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(8600); +var VerifyStream = __nccwpck_require__(4368); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; 
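// A short sketch of the non-streaming jws API exported just above
// (sign/verify/decode); the HS256 secret and claims are placeholders, and the
// streaming SignStream/VerifyStream variants are defined in the modules below.
const jws = require('jws');

const signature = jws.sign({
  header: { alg: 'HS256', typ: 'JWT' },
  payload: { sub: 'user-123', admin: false },
  secret: 'placeholder-secret',
});

// verify() takes the algorithm explicitly; it is never read from the
// (attacker-controllable) header.
console.log(jws.verify(signature, 'HS256', 'placeholder-secret')); // true

// decode() returns { header, payload, signature }; the payload is JSON-parsed
// here because header.typ === 'JWT'.
console.log(jws.decode(signature).payload.sub); // 'user-123'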
+exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; + + +/***/ }), + +/***/ 1831: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module, process*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var Stream = __nccwpck_require__(2203); +var util = __nccwpck_require__(9023); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; + + +/***/ }), + +/***/ 8600: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + 
this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; + + +/***/ }), + +/***/ 5126: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(181).Buffer); + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; + + +/***/ }), + +/***/ 4368: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = 
jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; + + /***/ }), /***/ 9829: @@ -51564,6 +71403,137 @@ function populateMaps (extensions, types) { } +/***/ }), + +/***/ 4402: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? 
ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; + + +/***/ }), + +/***/ 4900: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +let Mime = __nccwpck_require__(4402); +module.exports = new Mime(__nccwpck_require__(3725), __nccwpck_require__(8548)); + + +/***/ }), + +/***/ 8548: +/***/ ((module) => { + +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["w
bs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"ap
plication/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.m
acroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/
vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-
diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql"
:["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.trans
mit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + +/***/ }), + +/***/ 3725: +/***/ ((module) => { + +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + /***/ }), /***/ 3772: @@ -52518,6 +72488,175 @@ function regExpEscape (s) { } +/***/ }), + +/***/ 744: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + /***/ }), /***/ 6705: @@ -53891,10 +74030,6 @@ function getNodeRequestOptions(request) { agent = agent(parsedURL); } - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - // HTTP-network fetch step 4.2 // chunked encoding is handled by Node.js @@ -53943,6 +74078,20 @@ const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); }; +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
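+ * Used below when following redirects: sensitive headers such as authorization and cookie are dropped on a cross-protocol redirect.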
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + /** * Fetch function * @@ -53974,7 +74123,7 @@ function fetch(url, opts) { let error = new AbortError('The user aborted a request.'); reject(error); if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); + destroyStream(request.body, error); } if (!response || !response.body) return; response.body.emit('error', error); @@ -54015,9 +74164,43 @@ function fetch(url, opts) { req.on('error', function (err) { reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + finalize(); }); + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + req.on('response', function (res) { clearTimeout(reqTimeout); @@ -54089,7 +74272,7 @@ function fetch(url, opts) { size: request.size }; - if (!isDomainOrSubdomain(request.url, locationURL)) { + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { requestOpts.headers.delete(name); } @@ -54182,6 +74365,13 @@ function fetch(url, opts) { response = new Response(body, response_options); resolve(response); }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
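+          // if 'data' never fired above, resolve here with the (empty) body so the returned promise still settles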
+ if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); return; } @@ -54201,6 +74391,44 @@ function fetch(url, opts) { writeToStream(req, request); }); } +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + /** * Redirect code matching * @@ -54221,6 +74449,7 @@ exports.Headers = Headers; exports.Request = Request; exports.Response = Response; exports.FetchError = FetchError; +exports.AbortError = AbortError; /***/ }), @@ -56385,6 +76614,3674 @@ module.exports.implForWrapper = function (wrapper) { +/***/ }), + +/***/ 5560: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(8264) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 5500: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of 
${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.F = codes; + + +/***/ }), + +/***/ 2063: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + + + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = __nccwpck_require__(9274); +var Writable = __nccwpck_require__(8797); +__nccwpck_require__(9598)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
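+  // defer the end() call to the next tick so writes issued in the current tick can still flush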
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 5283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +var Transform = __nccwpck_require__(2337); +__nccwpck_require__(9598)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 9274: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = (__nccwpck_require__(4434).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = __nccwpck_require__(9023); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = __nccwpck_require__(7336); +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +__nccwpck_require__(9598)(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. 
+ if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(4868); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(4659); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} + +/***/ }), + +/***/ 2337: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. 
However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + + +module.exports = Transform; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = __nccwpck_require__(2063); +__nccwpck_require__(9598)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. 
If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} + +/***/ }), + +/***/ 8797: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
+ + + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(4488) +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(9598)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. 
Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; + +/***/ }), + +/***/ 4868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var finished = __nccwpck_require__(6815); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; + +/***/ }), + +/***/ 7336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if 
(Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = __nccwpck_require__(181), + Buffer = _require.Buffer; +var _require2 = __nccwpck_require__(9023), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); + +/***/ }), + +/***/ 5089: +/***/ ((module) => { + +"use strict"; + + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; + +/***/ }), + +/***/ 6815: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + + + +var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; + +/***/ }), + +/***/ 4659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function 
asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. 
+ var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; + + +/***/ }), + +/***/ 6701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + + + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(6815); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; + +/***/ }), + +/***/ 4874: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; + +/***/ }), + +/***/ 3283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(2203); + + +/***/ }), + +/***/ 6131: +/***/ ((module, exports, __nccwpck_require__) => { + +var Stream = __nccwpck_require__(2203); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(9274); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(8797); + exports.Duplex = __nccwpck_require__(2063); + exports.Transform = __nccwpck_require__(2337); + exports.PassThrough = __nccwpck_require__(5283); + exports.finished = __nccwpck_require__(6815); + exports.pipeline = __nccwpck_require__(6701); +} + + +/***/ }), + +/***/ 7842: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const {PassThrough} = __nccwpck_require__(2203); +const extend = __nccwpck_require__(3860); + +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} + +const DEFAULTS = { + objectMode: false, + retries: 2, + + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, + + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, + + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); + + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. 
+ return true; + } + } + }, +}; + +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } + + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + + if (typeof opts === 'function') { + callback = opts; + } + + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } + + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } + + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } + + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } + + function resetStreams() { + delayStream = null; + + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); + + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } + + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); + + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } + + if (streamMode) { + streamResponseHandled = false; + + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. + .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } + + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); + + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } + + setTimeout(makeRequest, nextRetryDelay); + } + + function onResponse(err, response, body) { + // An error such as DNS resolution. 
+ if (err) { + numNoResponseAttempts++; + + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; + } + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } + + // No more attempts need to be made, just continue on. + if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} + +module.exports = retryRequest; + +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} + +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; + + +/***/ }), + +/***/ 5546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(7084); + +/***/ }), + +/***/ 7084: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +var RetryOperation = __nccwpck_require__(9538); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? 
(Math.random() + 1) + : 1; + + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; + + +/***/ }), + +/***/ 9538: +/***/ ((module) => { + +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } + } + + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + this._timer.unref(); + } + + return true; +}; + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if 
(this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; + + +/***/ }), + +/***/ 3058: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + /***/ }), /***/ 2560: @@ -59607,6 +83504,2463 @@ function coerce (version, options) { } +/***/ }), + +/***/ 1546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var stubs = __nccwpck_require__(3897) + +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this + + var cfg = { + callthrough: true, + calls: 1 + } + + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream +} + +module.exports = StreamEvents + + +/***/ 
}), + +/***/ 4633: +/***/ ((module) => { + +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + var idx = state.bufferIndex || 0 + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { + return state.buffer[idx].length + } + } + + return state.length +} + + +/***/ }), + +/***/ 634: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var Buffer = (__nccwpck_require__(6132).Buffer); +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
+exports.I = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} + +/***/ }), + +/***/ 6132: +/***/ ((module, exports, __nccwpck_require__) => { + +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 6496: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if(str==="0") return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. 
+ if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; + +/***/ }), + +/***/ 3897: +/***/ ((module) => { + +"use strict"; + + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} + } + + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached + + return returnVal + } +} + + +/***/ }), + +/***/ 1450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const os = __nccwpck_require__(857); +const tty = __nccwpck_require__(2018); +const hasFlag = __nccwpck_require__(3813); + +const {env} = process; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} + +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else 
{ + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; + + +/***/ }), + +/***/ 7745: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. 
+ * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); + } + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map + +/***/ }), + +/***/ 4003: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(8611); +const https_1 = __nccwpck_require__(5692); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(7016); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; + } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? 
__nccwpck_require__(1970) + : __nccwpck_require__(6518); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map + +/***/ }), + +/***/ 321: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(6705); +const stream_1 = __nccwpck_require__(2203); +const uuid = __nccwpck_require__(8993); +const agents_1 = __nccwpck_require__(4003); +const TeenyStatistics_1 = __nccwpck_require__(7745); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(1546); +class RequestError extends Error { +} +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(3480); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
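At the call site, the exported function behaves like the classic request(options, callback) API, falling back to a readable stream when no callback is given; a minimal usage sketch, assuming the published teeny-request package rather than this bundled copy, with an illustrative URL:

const { teenyRequest } = require('teeny-request');

// Callback mode: mirrors the request(options, callback) signature.
teenyRequest({ uri: 'https://example.com/data.json' }, (err, res, body) => {
  if (err) throw err;
  console.log(res.statusCode, body); // application/json responses are parsed into `body`
});

// Stream mode: omitting the callback returns a PassThrough piped from the fetch response.
teenyRequest({ uri: 'https://example.com/data.json' }).pipe(process.stdout);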
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; +} +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7954: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(4446)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4446: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 5299: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(7954); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(7742)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. 
+ let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. + let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 6518: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(5299)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7742: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 8993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + 
return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(4684)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(3142)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(9655)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(880)); + +var _nil = _interopRequireDefault(__nccwpck_require__(194)); + +var _version = _interopRequireDefault(__nccwpck_require__(6257)); + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1400)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 8061: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7966: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7814: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 1118: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 8540: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 1352: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1400: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 4684: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 3142: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _md = _interopRequireDefault(__nccwpck_require__(8061)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 4575: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(1400); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 9655: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(7966)); + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _sha = _interopRequireDefault(__nccwpck_require__(1352)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 1579: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(1118)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6257: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + /***/ }), /***/ 770: @@ -59887,6 +86241,705 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { exports.debug = debug; // for test +/***/ }), + +/***/ 4488: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = __nccwpck_require__(9023).deprecate; + + +/***/ }), + +/***/ 2048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6415)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(1697)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(4676)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9771)); + +var _nil = _interopRequireDefault(__nccwpck_require__(7723)); + +var _version = _interopRequireDefault(__nccwpck_require__(5868)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 216: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7723: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 7879: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 2973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 7597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6415: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 1697: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _md = _interopRequireDefault(__nccwpck_require__(216)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2930: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _sha = _interopRequireDefault(__nccwpck_require__(507)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6200: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(7879)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 5868: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 8264: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + /***/ }), /***/ 8736: @@ -64894,6 +91947,81 @@ exports.debug = debug; // for test }).call(this); +/***/ }), + +/***/ 538: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. 
+ this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. + // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + /***/ }), /***/ 7242: @@ -64911,25 +92039,27 @@ var Inputs; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; - Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action -})(Inputs = exports.Inputs || (exports.Inputs = {})); + Inputs["LookupOnly"] = "lookup-only"; + Inputs["GCSBucket"] = "gcs-bucket"; + Inputs["GCSPathPrefix"] = "gcs-path-prefix"; // Input for cache, restore, save action +})(Inputs || (exports.Inputs = Inputs = {})); var Outputs; (function (Outputs) { Outputs["CacheHit"] = "cache-hit"; Outputs["CachePrimaryKey"] = "cache-primary-key"; Outputs["CacheMatchedKey"] = "cache-matched-key"; // Output from restore action -})(Outputs = exports.Outputs || (exports.Outputs = {})); +})(Outputs || (exports.Outputs = Outputs = {})); var State; (function (State) { State["CachePrimaryKey"] = "CACHE_KEY"; State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); +})(State || (exports.State = State = {})); var Events; (function (Events) { Events["Key"] = "GITHUB_EVENT_NAME"; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; -})(Events = exports.Events || (exports.Events = {})); +})(Events || (exports.Events = Events = {})); exports.RefKey = "GITHUB_REF"; @@ -64956,13 +92086,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -64973,8 +92113,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0; -const cache = __importStar(__nccwpck_require__(5116)); +exports.restoreImpl = restoreImpl; +exports.restoreOnlyRun = restoreOnlyRun; +exports.restoreRun = restoreRun; +const cache = __importStar(__nccwpck_require__(9614)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); const stateProvider_1 = __nccwpck_require__(2879); @@ -65033,7 +92175,6 @@ function restoreImpl(stateProvider, earlyExit) { } }); } -exports.restoreImpl = restoreImpl; function run(stateProvider, earlyExit) { return __awaiter(this, void 0, void 0, function* () { yield restoreImpl(stateProvider, earlyExit); @@ -65052,13 +92193,11 @@ function restoreOnlyRun(earlyExit) { yield run(new stateProvider_1.NullStateProvider(), earlyExit); }); } -exports.restoreOnlyRun = restoreOnlyRun; function restoreRun(earlyExit) { return __awaiter(this, void 0, void 0, function* () { yield run(new stateProvider_1.StateProvider(), earlyExit); }); } -exports.restoreRun = restoreRun; /***/ }), @@ -65084,13 +92223,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.NullStateProvider = exports.StateProvider = void 0; const core = __importStar(__nccwpck_require__(7484)); @@ -65159,15 +92308,33 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isGhes = isGhes; +exports.isExactKeyMatch = isExactKeyMatch; +exports.logWarning = logWarning; +exports.isValidEvent = isValidEvent; +exports.getInputAsArray = getInputAsArray; +exports.getInputAsInt = getInputAsInt; +exports.getInputAsBool = getInputAsBool; +exports.isGCSAvailable = isGCSAvailable; +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; const cache = __importStar(__nccwpck_require__(5116)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); @@ -65179,25 +92346,21 @@ function isGhes() { const isLocalHost = hostname.endsWith(".LOCALHOST"); return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; } -exports.isGhes = isGhes; function isExactKeyMatch(key, cacheKey) { return !!(cacheKey && cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0); } -exports.isExactKeyMatch = isExactKeyMatch; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); } -exports.logWarning = logWarning; // Cache token authorized for all events that are tied to a ref // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context function isValidEvent() { return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]); } -exports.isValidEvent = isValidEvent; function getInputAsArray(name, options) { return core .getInput(name, options) @@ -65205,7 +92368,6 @@ function getInputAsArray(name, options) { .map(s => s.replace(/^!\s+/, "!").trim()) .filter(x => x !== ""); } -exports.getInputAsArray = getInputAsArray; function getInputAsInt(name, options) { const value = parseInt(core.getInput(name, options)); if (isNaN(value) || value < 0) { @@ -65213,13 +92375,31 @@ function getInputAsInt(name, options) { } return value; } -exports.getInputAsInt = getInputAsInt; function getInputAsBool(name, options) { const result = core.getInput(name, options); return result.toLowerCase() === "true"; } -exports.getInputAsBool = getInputAsBool; +// Check if GCS is configured and available +function isGCSAvailable() { + try { + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + if (!bucket) { + core.info("GCS bucket name not provided, falling back to GitHub 
cache"); + return false; + } + return true; + } + catch (error) { + logWarning(`Failed to check GCS availability: ${error.message}`); + return false; + } +} function isCacheFeatureAvailable() { + // Check if GCS cache is available + if (isGCSAvailable()) { + return true; + } + // Otherwise, check GitHub cache if (cache.isFeatureAvailable()) { return true; } @@ -65231,7 +92411,242 @@ Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."); return false; } -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + + +/***/ }), + +/***/ 9614: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.restoreCache = restoreCache; +exports.saveCache = saveCache; +exports.isFeatureAvailable = isFeatureAvailable; +const core = __importStar(__nccwpck_require__(7484)); +const path = __importStar(__nccwpck_require__(6928)); +const constants_1 = __nccwpck_require__(7242); +const actionUtils_1 = __nccwpck_require__(8270); +const utils = __importStar(__nccwpck_require__(8299)); +const storage_1 = __nccwpck_require__(8525); +const cache = __importStar(__nccwpck_require__(5116)); +const tar_1 = __nccwpck_require__(5321); +const DEFAULT_PATH_PREFIX = "github-cache"; +// Function to initialize GCS client using Application Default Credentials +function getGCSClient() { + try { + core.info("Initializing GCS client"); + return new storage_1.Storage(); + } + catch (error) { + core.warning(`Failed to initialize GCS client: ${error.message}`); + return null; + } +} +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) { + return __awaiter(this, void 0, void 0, function* () { + // Check if GCS is available + if ((0, actionUtils_1.isGCSAvailable)()) { + try { + const result = yield restoreFromGCS(paths, primaryKey, restoreKeys, options); + if (result) { + core.info(`Cache restored from GCS with key: ${result}`); + return result; + } + core.info("Cache not found in GCS, falling back to GitHub cache"); + } + catch (error) { + core.warning(`Failed to restore from GCS: ${error.message}`); + core.info("Falling back to GitHub cache"); + } + } + // Fall back to GitHub cache + return yield cache.restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + }); +} +function saveCache(paths, key, options, enableCrossOsArchive) { + return __awaiter(this, void 0, void 0, function* () { + if ((0, actionUtils_1.isGCSAvailable)()) { + try { + const result = yield saveToGCS(paths, key); + if (result) { + core.info(`Cache saved to GCS with key: ${key}`); + return result; // Success ID + } + core.warning("Failed to save to GCS, falling back to GitHub cache"); + } + catch (error) { + core.warning(`Failed to save to GCS: ${error.message}`); + core.info("Falling back to GitHub cache"); + } + } + // Fall back to GitHub cache + return yield cache.saveCache(paths, key, options, enableCrossOsArchive); + }); +} +// Function that checks if the cache feature is available (either GCS or GitHub cache) +function isFeatureAvailable() { + return (0, actionUtils_1.isGCSAvailable)() || cache.isFeatureAvailable(); +} +function restoreFromGCS(_paths_1, primaryKey_1) { + return __awaiter(this, arguments, void 0, function* (_paths, // validate paths? 
+ primaryKey, restoreKeys = [], options) { + const storage = getGCSClient(); + if (!storage) { + return undefined; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const keys = [primaryKey, ...restoreKeys]; + const gcsPath = yield findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod); + if (!gcsPath) { + core.info(`No matching cache found`); + return undefined; + } + // If lookup only, just return the key + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info(`Cache found in GCS with key: ${gcsPath}`); + return gcsPath; + } + try { + core.info(`Downloading from GCS: ${bucket}/${gcsPath}`); + const file = storage.bucket(bucket).file(gcsPath); + yield file.download({ destination: archivePath }); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + return gcsPath; + } + catch (error) { + core.warning(`Failed to restore: ${error.message}`); + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function getGCSPath(pathPrefix, key, compressionMethod) { + return `${pathPrefix}/${key}.${utils.getCacheFileName(compressionMethod)}`; +} +function saveToGCS(paths, key) { + return __awaiter(this, void 0, void 0, function* () { + let cacheId = -1; + const storage = getGCSClient(); + if (!storage) { + return cacheId; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + core.info(`Uploading to GCS: ${bucket}/${gcsPath}`); + yield storage.bucket(bucket).upload(archivePath, { + destination: gcsPath, + resumable: true, // this may not be necessary + }); + return 1; + } + catch (error) { + core.warning(`Error creating or uploading cache: ${error.message}`); + return -1; + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod) { + return __awaiter(this, void 0, void 
0, function* () { + for (const key of keys) { + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + if (yield checkFileExists(storage, bucket, gcsPath)) { + core.info(`Found file on bucket: ${bucket} with key: ${gcsPath}`); + return gcsPath; + } + } + return undefined; + }); +} +function checkFileExists(storage, bucket, path) { + return __awaiter(this, void 0, void 0, function* () { + const [exists] = yield storage.bucket(bucket).file(path).exists(); + return exists; + }); +} /***/ }), @@ -65316,6 +92731,30 @@ module.exports = require("net"); /***/ }), +/***/ 8474: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 1708: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:process"); + +/***/ }), + +/***/ 7975: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + /***/ 857: /***/ ((module) => { @@ -65340,6 +92779,14 @@ module.exports = require("punycode"); /***/ }), +/***/ 3480: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + /***/ 2203: /***/ ((module) => { @@ -65372,6 +92819,14 @@ module.exports = require("tls"); /***/ }), +/***/ 2018: +/***/ ((module) => { + +"use strict"; +module.exports = require("tty"); + +/***/ }), + /***/ 7016: /***/ ((module) => { @@ -65394,6 +92849,14404 @@ module.exports = require("util"); "use strict"; module.exports = require("zlib"); +/***/ }), + +/***/ 6637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = __nccwpck_require__(206); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. 
ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. 
+ * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); + + +/***/ }), + +/***/ 2887: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const path = __importStar(__nccwpck_require__(6928)); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const util_1 = __nccwpck_require__(9023); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const util_js_1 = __nccwpck_require__(4557); +const acl_js_1 = __nccwpck_require__(6637); +const file_js_1 = __nccwpck_require__(9975); +const iam_js_1 = __nccwpck_require__(2880); +const notification_js_1 = __nccwpck_require__(8598); +const storage_js_1 = __nccwpck_require__(2848); +const signer_js_1 = __nccwpck_require__(7319); +const stream_1 = __nccwpck_require__(2203); +const url_1 = __nccwpck_require__(7016); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * 
A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). 
+ * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. 
+ */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. 
+ */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. 
+ */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. 
+ * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. 
+ this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime_1.default.getType(destinationFile.name) || undefined; + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + contentEncoding: destinationFile.metadata.contentEncoding, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. 
+ * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_js_1.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. 
+ * + * File preconditions cannot be passed to this function. It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. 
+ * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. + * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *

+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ *
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ *
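// --- Editorial illustration (not part of the bundled file) -----------------
// A minimal sketch, assuming a bucket named 'albums' and a billable project
// id 'their-project-id', of the requester-pays flow described in the
// surrounding comments: the owner enables `requesterPays`, and a requester
// then bills their own project for subsequent calls via setUserProject()
// (documented further below in this class).
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function requesterPaysFlow() {
  // Owner side: turn on requesterPays (promise form, callback omitted).
  const ownerBucket = storage.bucket('albums');
  await ownerBucket.enableRequesterPays();

  // Requester side: attach the project that should be billed for requests
  // made through this Bucket instance, then list objects as usual.
  const requesterBucket = storage.bucket('albums');
  requesterBucket.setUserProject('their-project-id'); // assumed project id
  const [files] = await requesterBucket.getFiles({prefix: 'photos/'});
  console.log(files.map(f => f.name));
}

requesterPaysFlow().catch(console.error);
// ---------------------------------------------------------------------------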
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. + * See: https://cloud.google.com/storage/docs/managed-folders + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. 
+ * See: https://cloud.google.com/storage/docs/managed-folders + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + if (query.fields && + query.autoPaginate && + !query.fields.includes('nextPageToken')) { + query.fields = `${query.fields},nextPageToken`; + } + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.fields) { + const fileInstance = file; + return fileInstance; + } + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + host: cfg.host, + signingEndpoint: cfg.signingEndpoint, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this, undefined, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {object} RestoreOptions Options for Bucket#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/restore#resource| Object resource}. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + */ + /** + * Restores a soft-deleted bucket + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [bucket] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return bucket; + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. 
+ * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. 
+ * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. + * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. 
+ * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. + * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. 
+ * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. + * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. + const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? 
errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification', 'restore'], +}); + + +/***/ }), + +/***/ 2658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Channel = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. 
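The retry gating in `Bucket#upload` and `disableAutoRetryConditionallyIdempotent_` above only keeps automatic retries enabled when the request carries a matching precondition (or the idempotency strategy allows unconditional retries). A minimal caller-side sketch, assuming the `IdempotencyStrategy` enum and the `preconditionOpts` upload option are exposed from the package root as in current `@google-cloud/storage` releases; bucket and file names are illustrative:

```js
// Sketch: make an upload conditionally idempotent so the library's
// auto-retry logic (see Bucket#upload above) stays enabled.
const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');

const storage = new Storage({
  retryOptions: {
    // Only retry requests that carry preconditions (the conditional strategy).
    idempotencyStrategy: IdempotencyStrategy.RetryConditional,
  },
});

async function uploadIfAbsent() {
  // ifGenerationMatch: 0 means "create only if the object does not already
  // exist", which makes the upload safe to retry.
  await storage.bucket('my-bucket').upload('./local-image.png', {
    destination: 'new-image.png',
    preconditionOpts: {ifGenerationMatch: 0},
  });
}

uploadIfAbsent().catch(console.error);
```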
+ */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); + + +/***/ }), + +/***/ 4317: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = __nccwpck_require__(9896); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
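For a concrete feel of the table-driven checksum class above: hashing the four bytes of 'data' yields the base64 value 'rth90Q==' (the same value the crc32c generator JSDoc uses later in this file). A small sketch, assuming `CRC32C` is re-exported from the package root:

```js
// Sketch: computing, serializing and validating a CRC32C checksum.
const {CRC32C} = require('@google-cloud/storage'); // assumed re-export

const crc32c = new CRC32C();
crc32c.update(Buffer.from('data'));

console.log(crc32c.toString());           // 'rth90Q==' (base64 of the 4-byte value)
console.log(crc32c.validate('rth90Q==')); // true
// CRC32C.from accepts a number, a 4-byte buffer, or a base64 string:
console.log(CRC32C.from('rth90Q==').valueOf() === crc32c.valueOf()); // true
```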
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => { + if (typeof d === 'string') { + crc32c.update(Buffer.from(d)); + } + else { + crc32c.update(d); + } + }) + .on('end', () => resolve()) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; + + +/***/ }), + +/***/ 9975: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +const crypto = __importStar(__nccwpck_require__(6982)); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const resumableUpload = __importStar(__nccwpck_require__(8043)); +const stream_1 = __nccwpck_require__(2203); +const zlib = __importStar(__nccwpck_require__(3106)); +const storage_js_1 = __nccwpck_require__(2848); +const bucket_js_1 = __nccwpck_require__(2887); +const acl_js_1 = __nccwpck_require__(6637); +const signer_js_1 = __nccwpck_require__(7319); +const util_js_1 = __nccwpck_require__(7325); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +const util_js_2 = __nccwpck_require__(4557); +const crc32c_js_1 = __nccwpck_require__(4317); +const hash_stream_validator_js_1 = __nccwpck_require__(4873); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @deprecated - no longer used + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +/** + * @private + * This regex will match compressible content types. These are primarily text/*, +json, +text, +xml content types. + * This was based off of mime-db and may periodically need to be updated if new compressible content types become + * standards. 
+ */ +const COMPRESSIBLE_MIME_REGEX = new RegExp([ + /^text\/|application\/ecmascript|application\/javascript|application\/json/, + /|application\/postscript|application\/rtf|application\/toml|application\/vnd.dart/, + /|application\/vnd.ms-fontobject|application\/wasm|application\/x-httpd-php|application\/x-ns-proxy-autoconfig/, + /|application\/x-sh(?!ockwave-flash)|application\/x-tar|application\/x-virtualbox-hdd|application\/x-virtualbox-ova|application\/x-virtualbox-ovf/, + /|^application\/x-virtualbox-vbox$|application\/x-virtualbox-vdi|application\/x-virtualbox-vhd|application\/x-virtualbox-vmdk/, + /|application\/xml|application\/xml-dtd|font\/otf|font\/ttf|image\/bmp|image\/vnd.adobe.photoshop|image\/vnd.microsoft.icon/, + /|image\/vnd.ms-dds|image\/x-icon|image\/x-ms-bmp|message\/rfc822|model\/gltf-binary|\+json|\+text|\+xml|\+yaml/, +] + .map(r => r.source) + .join(''), 'i'); +class RequestError extends Error { +} +exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +const GS_UTIL_URL_REGEX = /(gs):\/\/([a-z0-9_.-]+)\/(.+)/g; +const HTTPS_PUBLIC_URL_REGEX = /(https):\/\/(storage\.googleapis\.com)\/([a-z0-9_.-]+)\/(.+)/g; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
+ * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. 
+ * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {number} [options.generation] The generation number to get + * @param {string} [options.restoreToken] If this is a soft-deleted object in an HNS-enabled bucket, returns the restore token which will + * be necessary to restore it if there's a name conflict with another object. + * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. + Object `generation` is required if `softDeleted` is set to True. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? 
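The constructor coerces `generation` twice (once for the request query object and once for the instance, per the `@TODO` above); either a number or a numeric string is accepted and converted with `Number()` before it is used to scope requests. A brief sketch with made-up values:

```js
// Sketch: scoping a File handle to one object generation; a string value
// is coerced to a number by the constructor.
const {Storage} = require('@google-cloud/storage');
const bucket = new Storage().bucket('my-bucket');

const byNumber = bucket.file('report.csv', {generation: 1700000000000000});
const byString = bucket.file('report.csv', {generation: '1700000000000000'});

console.log(byNumber.generation);                          // 1700000000000000
console.log(byNumber.generation === byString.generation);  // true
```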
+ if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. 
+ * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
Downloading a File
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
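The range handling above is why `createReadStream` refuses `validation` together with `start`/`end`: a partial response cannot be checked against the object's stored hashes. A short sketch of the two legal shapes, with made-up bucket and path names:

```js
// Sketch: full downloads are CRC32C-validated by default; ranged downloads
// must not request validation (doing so throws an Error with
// FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE).
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');
const logFile = new Storage().bucket('my-bucket').file('access_log');

// Full download: integrity is checked with CRC32C by default.
logFile.createReadStream()
  .on('error', console.error)
  .pipe(fs.createWriteStream('/tmp/access_log'));

// Ranged download: validation is implicitly disabled; passing
// `validation: 'crc32c'` here would throw instead.
logFile.createReadStream({start: 0, end: 999})
  .on('error', console.error)
  .pipe(fs.createWriteStream('/tmp/access_log.head'));
```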
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *

<h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. 
+ * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
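A minimal sketch of the recommendation above (not part of the bundled source; the bucket name, object name, and local path are placeholders) — for small objects, pass `resumable: false` so the upload goes out as a single simple request instead of a resumable session:
```
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('small-config.json');

// For objects well under 10MB, skipping the resumable session avoids the
// extra round trip used to create the upload URI.
fs.createReadStream('./small-config.json')
  .pipe(file.createWriteStream({ resumable: false }))
  .on('error', err => console.error(err))
  .on('finish', () => {
    // Upload complete via a single (non-resumable) request.
  });
```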

+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

<h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

<h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

<h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //

<h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = COMPRESSIBLE_MIME_REGEX.test(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + // If the write stream, which is returned to the caller, catches an error we need to make sure that + // at least one of the streams in the pipeline below gets notified so that they + // all get cleaned up / destroyed. + writeStream.once('error', e => { + emitStream.destroy(e); + }); + // If the write stream is closed, cleanup the pipeline below by calling destroy on one of the streams. + writeStream.once('close', () => { + emitStream.destroy(); + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + receivedData = true; + // We know that the file exists the server - now we can truncate/write to a file + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', (err) => { + callback(err, Buffer.from('')); + }) + .on('finish', () => { + callback(null, data); + }); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * Gets a reference to a Cloud Storage {@link File} file from the provided URL in string format. + * @param {string} publicUrlOrGsUrl the URL as a string. Must be of the format gs://bucket/file + * or https://storage.googleapis.com/bucket/file. + * @param {Storage} storageInstance an instance of a Storage object. + * @param {FileOptions} [options] Configuration options + * @returns {File} + */ + static from(publicUrlOrGsUrl, storageInstance, options) { + const gsMatches = [...publicUrlOrGsUrl.matchAll(GS_UTIL_URL_REGEX)]; + const httpsMatches = [...publicUrlOrGsUrl.matchAll(HTTPS_PUBLIC_URL_REGEX)]; + if (gsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, gsMatches[0][2]); + return new File(bucket, gsMatches[0][3], options); + } + else if (httpsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, httpsMatches[0][3]); + return new File(bucket, httpsMatches[0][4], options); + } + else { + throw new Error('URL string must be of format gs://bucket/file or https://storage.googleapis.com/bucket/file'); + } + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + super + .get(options) + .then(resp => cb(null, ...resp)) + .catch(cb); + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. + */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). 
If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } 
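+ // Both bounds are numbers at this point; record them as a policy condition so the
+ // signed POST only accepts uploads whose Content-Length falls within [min, max].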
+ conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const universe = this.parent.storage.universeDomain; + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (this.storage.customEndpoint) { + url = this.storage.apiEndpoint; + } + else if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.${universe}/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `https://storage.${universe}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + 
}; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. + * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style + * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. 
Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + host: cfg.host, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveFileAtomicResponse + * @property {File} 0 The moved {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveFileAtomicCallback + * @param {?Error} err Request error, if any. + * @param {File} movedFile The moved {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveFileAtomicOptions Configuration options for File#moveFileAtomic(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {object} [preconditionOpts] Precondition options. + * @property {number} [preconditionOpts.ifGenerationMatch] Makes the operation conditional on whether the object's current generation matches the given value. + */ + /** + * Move this file within the same HNS-enabled bucket. + * The source object must exist and be a live object. + * The source and destination object IDs must be different. + * Overwriting the destination object is allowed by default, but can be prevented + * using preconditions. 
+ * If the destination path includes non-existent parent folders, they will be created. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/move| Objects: move API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|File} destination Destination file name or File object within the same bucket.. + * @param {MoveFileAtomicOptions} [options] Configuration options. See an + * @param {MoveFileAtomicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Assume 'my-hns-bucket' is an HNS-enabled bucket. + * //- + * const bucket = storage.bucket('my-hns-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.moveFileAtomic('moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "moved-image.png" + * + * // `movedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // Move the file to a subdirectory, creating parent folders if necessary. + * //- + * file.moveFileAtomic('new-folder/subfolder/moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "new-folder/subfolder/moved-image.png" + * }); + * + * //- + * // Prevent overwriting an existing destination object using preconditions. + * //- + * file.moveFileAtomic('existing-destination.png', { + * preconditionOpts: { + * ifGenerationMatch: 0 // Fails if the destination object exists. + * } + * }, function(err, movedFile, apiResponse) { + * if (err) { + * // Handle the error (e.g., the destination object already exists). + * } else { + * // Move successful. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.moveFileAtomic('moved-image.png).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file_hns + * Another example: + */ + moveFileAtomic(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destName = parsedDestination[2]; + } + else { + destName = destination; + } + } + else if (destination instanceof File) { + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + newFile = newFile || this.bucket.file(destName); + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? 
void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + const query = {}; + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/moveTo/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. 
+ * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). 
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [restoreToken] Returns an option that must be specified when getting a soft-deleted object from an HNS-enabled + * bucket that has a naming and generation conflict with another object in the same bucket. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. 
+ * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [file] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return file; + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
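+ * For example, a small one-shot upload can opt out of the resumable flow by passing
+ * `{resumable: false}` (an illustrative variant of the example below):
+ * `file.save(contents, {resumable: false}, err => {});`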
+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || + Buffer.isBuffer(data) || + data instanceof Uint8Array) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const cfg = { + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + let uploadStream; + try { + uploadStream = resumableUpload.upload(cfg); + } + catch (error) { + dup.destroy(error); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + return; + } + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + 'restore', + ], +}); + + +/***/ }), + +/***/ 4873: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashStreamValidator = void 0; +const crypto_1 = __nccwpck_require__(6982); +const stream_1 = __nccwpck_require__(2203); +const crc32c_js_1 = __nccwpck_require__(4317); +const file_js_1 = __nccwpck_require__(9975); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. 
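+ // `failed` starts as true whenever a checksum check was requested; the test() calls
+ // below overwrite it, and it only becomes false when a computed hash matches the
+ // expected value supplied by the caller.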
+ let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); + + +/***/ }), + +/***/ 5891: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HmacKey = void 0; +const index_js_1 = __nccwpck_require__(1325); +const storage_js_1 = __nccwpck_require__(2848); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. 
+ * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. 
+ * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); + + +/***/ }), + +/***/ 2880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = __nccwpck_require__(206); +const util_js_1 = __nccwpck_require__(4557); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). 
+ * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. + * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Iam); + + +/***/ }), + +/***/ 8525: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = __nccwpck_require__(1325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); +var storage_js_1 = __nccwpck_require__(2848); +Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); +Object.defineProperty(exports, "RETRYABLE_ERR_FN_DEFAULT", ({ enumerable: true, get: function () { return storage_js_1.RETRYABLE_ERR_FN_DEFAULT; } })); +Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); +var bucket_js_1 = __nccwpck_require__(2887); +Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); +__exportStar(__nccwpck_require__(4317), exports); +var channel_js_1 = __nccwpck_require__(2658); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); +var file_js_1 = __nccwpck_require__(9975); +Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); +__exportStar(__nccwpck_require__(4873), exports); +var hmacKey_js_1 = __nccwpck_require__(5891); +Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); +var iam_js_1 = __nccwpck_require__(2880); +Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); +var notification_js_1 = __nccwpck_require__(8598); +Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); +__exportStar(__nccwpck_require__(4), exports); + + +/***/ }), + +/***/ 1325: +/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; +var service_js_1 = __nccwpck_require__(8542); +Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); +var service_object_js_1 = __nccwpck_require__(2908); +Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); +var util_js_1 = __nccwpck_require__(7325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); +Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); + + +/***/ }), + +/***/ 2908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = __nccwpck_require__(206); +const events_1 = __nccwpck_require__(4434); +const util_js_1 = __nccwpck_require__(7325); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). 
+ this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); + + +/***/ }), + +/***/ 8542: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const google_auth_library_1 = __nccwpck_require__(492); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + this.universeDomain = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.customEndpoint = config.customEndpoint || false; + this.makeAuthenticatedRequest = util_js_1.util.makeAuthenticatedRequestFactory({ + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + clientOptions: { + universeDomain: options.universeDomain, + ...options.clientOptions, + }, + }); + this.authClient = this.makeAuthenticatedRequest.authClient; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. 
+ return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += + ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; + + +/***/ }), + +/***/ 7325: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = __nccwpck_require__(7635); +const htmlEntities = __importStar(__nccwpck_require__(3660)); +const google_auth_library_1 = __nccwpck_require__(492); +const retry_request_1 = __importDefault(__nccwpck_require__(7842)); +const stream_1 = __nccwpck_require__(2203); +const teeny_request_1 = __nccwpck_require__(321); +const uuid = __importStar(__nccwpck_require__(2048)); +const service_js_1 = __nccwpck_require__(8542); +const util_js_1 = __nccwpck_require__(4557); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. 
+ * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(htmlEntities.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. 
+ * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? 
void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. 
+ */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available + authClient = new google_auth_library_1.GoogleAuth({ + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + clientOptions: googleAutoAuthConfig.clientOptions, + }); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. 
+ return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? 
[{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; + + +/***/ }), + +/***/ 8598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. 
+ * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); + + +/***/ }), + +/***/ 8043: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Upload = exports.PROTOCOL_REGEX = void 0; +exports.upload = upload; +exports.createURI = createURI; +exports.checkUploadStatus = checkUploadStatus; +const abort_controller_1 = __importDefault(__nccwpck_require__(7413)); +const crypto_1 = __nccwpck_require__(6982); +const gaxios = __importStar(__nccwpck_require__(7003)); +const google_auth_library_1 = __nccwpck_require__(492); +const stream_1 = __nccwpck_require__(2203); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(4557); +const util_js_2 = __nccwpck_require__(7325); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + const universe = cfg.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.apiEndpoint = `https://storage.${universe}`; + if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + const hostname = new URL(this.apiEndpoint).hostname; + // check if it is a domain of a known universe + const isDomain = hostname === universe; + const isDefaultUniverseDomain = hostname === google_auth_library_1.DEFAULT_UNIVERSE; + // check if it is a subdomain of a known universe + // by checking a last (universe's length + 1) of a hostname + const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; + const isSubDomainOfDefaultUniverse = hostname.slice(-(google_auth_library_1.DEFAULT_UNIVERSE.length + 1)) === + `.${google_auth_library_1.DEFAULT_UNIVERSE}`; + if (!isDomain && + !isDefaultUniverseDomain && + !isSubDomainOfUniverse && + !isSubDomainOfDefaultUniverse) { + 
// a custom, non-universe domain, + // use gaxios + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + if (typeof cfg.key === 'string') { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + else { + const base64Key = cfg.key.toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
+ */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. 
+ * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. 
+ this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
+ const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. 
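// (Illustrative numbers, not taken from the diff: with `this.offset` 0,
// `this.numBytesWritten` 0, and a 262,144-byte chunk as `bytesToUpload`, the
// headers set below come out as `Content-Length: 262144` and
// `Content-Range: bytes 0-262143/*` while the total object size is still
// unknown, or `bytes 0-262143/262144` once this is known to be the final
// chunk of a non-partial upload.)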
+ const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = + `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
+ this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
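// (A 308 "Resume Incomplete" response is how a resumable-upload session
// reports a partially received object; `checkUploadStatus` and
// `responseHandler` above read its `range` header, e.g. `bytes=0-262143`,
// to work out the next byte offset to send.)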
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${JSON.stringify(resp.data)}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error(`Retry limit exceeded - ${JSON.stringify(resp.data)}`)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} + + +/***/ }), + +/***/ 7319: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(__nccwpck_require__(6982)); +const url = __importStar(__nccwpck_require__(7016)); +const storage_js_1 = __nccwpck_require__(2848); +const util_js_1 = __nccwpck_require__(4557); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @deprecated - unused + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(auth, bucket, file, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage = new storage_js_1.Storage()) { + this.auth = auth; + this.bucket = bucket; + this.file = file; + this.storage = storage; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? 
(0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + var _a; + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + var _a; + const auth = this.auth; + try { + const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const credentials = await auth.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + var _a; + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? 
void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + extensionHeaders.host = fqdn.hostname; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + var _a; + const credentials = await this.auth.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. 
+ // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; + + +/***/ }), + +/***/ 2848: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
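// A minimal standalone sketch (not part of the bundled file) of the V4
// canonical request that `URLSigner.getSignedUrlV4` above hashes before
// signing: header names are lowercased and sorted, their values trimmed with
// runs of spaces collapsed, and the pieces joined with '\n' before a SHA-256
// digest is taken. The `sketch*` names and sample values are illustrative
// only, assuming plain string header and query values.
const {createHash} = require('crypto');
function sketchCanonicalHeaders(headers) {
  return Object.entries(headers)
    .map(([name, value]) => [name.toLowerCase(), `${value}`.trim().replace(/\s{2,}/g, ' ')])
    .sort((a, b) => a[0].localeCompare(b[0]))
    .map(([name, value]) => `${name}:${value}\n`)
    .join('');
}
function sketchCanonicalRequest(method, path, query, headersString, signedHeaders) {
  // No contentSha256 is supplied here, so the payload is marked unsigned,
  // matching the fallback in `getCanonicalRequest` above.
  return [method, path, query, headersString, signedHeaders, 'UNSIGNED-PAYLOAD'].join('\n');
}
const sketchRequest = sketchCanonicalRequest(
  'GET',
  '/my-bucket/my-file.txt',
  'X-Goog-Expires=3600',
  sketchCanonicalHeaders({Host: 'storage.googleapis.com'}),
  'host'
);
// The hex digest of the canonical request is what feeds the
// GOOG4-RSA-SHA256 string-to-sign.
const sketchDigest = createHash('sha256').update(sketchRequest).digest('hex');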
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const stream_1 = __nccwpck_require__(2203); +const bucket_js_1 = __nccwpck_require__(2887); +const channel_js_1 = __nccwpck_require__(2658); +const file_js_1 = __nccwpck_require__(9975); +const util_js_1 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const hmacKey_js_1 = __nccwpck_require__(5891); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. 
+ * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
<h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const universe = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + let apiEndpoint = `https://storage.${universe}`; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint && options.apiEndpoint !== apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? 
void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. 
+ * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. + * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {object} [hierarchicalNamespace.enabled=false] Specify whether or not to enable hierarchical namespace on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. 
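The boolean storage-class shorthands in the `CreateBucketRequest` typedef above (for example `coldline`) are translated by `createBucket` into the matching `storageClass` value before the API call, and the promise form resolves with the `CreateBucketResponse` pair. A minimal async/await sketch complementing the callback-style examples that follow; the bucket name is hypothetical and must be globally unique:

```js
// Illustrative only: `coldline: true` below is rewritten by createBucket
// into `storageClass: 'COLDLINE'` before the request is sent.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function createColdlineBucket() {
  // CreateBucketResponse is [Bucket, apiResponse] when the callback is omitted.
  const [bucket, apiResponse] = await storage.createBucket('my-unique-bucket-name', {
    location: 'US-CENTRAL1',
    coldline: true,
  });
  console.log(`Created ${bucket.name} with class ${apiResponse.storageClass}`);
}
```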
+ * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. + * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + if (body.predefinedAcl) { + query.predefinedAcl = body.predefinedAcl; + delete body.predefinedAcl; + } + if (body.predefinedDefaultObjectAcl) { + query.predefinedDefaultObjectAcl = body.predefinedDefaultObjectAcl; + delete body.predefinedDefaultObjectAcl; + } + if (body.projection) { + query.projection = body.projection; + delete body.projection; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} 
[projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. + * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. 
+ * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + * @param {boolean} [softDeleted] If true, returns the soft-deleted object. + * Object `generation` is required if `softDeleted` is set to True. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. 
+ * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. 
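Unlike `getBuckets`, the `getHmacKeys` method above carries no usage example of its own. A minimal sketch of the promise form, which is available because `promisifyAll` (applied at the end of this class) covers `getHmacKeys` as well; the logged fields come from the `HmacKeyMetadata` typedef earlier in this file:

```js
// Illustrative only: listing HMAC keys with the promise form.
const {Storage} = require('@google-cloud/storage');
const storage = new Storage();

async function listHmacKeys() {
  const [hmacKeys] = await storage.getHmacKeys();
  for (const hmacKey of hmacKeys) {
    // metadata is pre-populated from the list response, as with getBuckets.
    console.log(hmacKey.metadata.accessId, hmacKey.metadata.state);
  }
}
```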
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); + + +/***/ }), + +/***/ 4: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = __nccwpck_require__(9975); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const path = __importStar(__nccwpck_require__(6928)); +const fs_1 = __nccwpck_require__(9896); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +const fast_xml_parser_1 = __nccwpck_require__(9741); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const crypto_1 = __nccwpck_require__(6982); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. 
+ * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; + } +} +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. 
+ * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = + `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {Function} [customDestinationBuilder] A fuction that will take the current path of a local file + * and return a string representing a custom path to be used to upload the file to GCS. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. 
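The `customDestinationBuilder` option described above receives each local file path and must return the object name to upload to; the built-in example that follows does not show it, so here is a minimal sketch. The `backups/` naming scheme is hypothetical, and the snippet assumes `TransferManager` is exported from the package root alongside `Storage`, as in the library's transfer-manager samples:

```js
// Illustrative only: renaming uploads with customDestinationBuilder.
const path = require('path');
const {Storage, TransferManager} = require('@google-cloud/storage');

const transferManager = new TransferManager(new Storage().bucket('my-bucket'));

async function uploadRenamed(filePaths) {
  await transferManager.uploadManyFiles(filePaths, {
    // Flatten every local file into a hypothetical "backups/" prefix.
    customDestinationBuilder: filePath => `backups/${path.basename(filePath)}`,
  });
}
```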
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = options.customDestinationBuilder + ? options.customDestinationBuilder(filePath, options) + : filePath.split(path.sep).join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * @property {boolean} [skipIfExists] Do not download the file if it already exists in + * the destination. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. 
Setting options.prefix or options.stripPrefix + * or options.passthroughOptions.destination will cause the downloaded files to be written to the file system + * instead of being returned as a buffer. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix || passThroughOptionsCopy.destination) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + if (options.skipIfExists && + (0, fs_1.existsSync)(passThroughOptionsCopy.destination || '')) { + continue; + } + promises.push(limit(async () => { + const destination = passThroughOptionsCopy.destination; + if (destination && destination.endsWith(path.sep)) { + await fs_1.promises.mkdir(destination, { recursive: true }); + return Promise.resolve([ + Buffer.alloc(0), + ]); + } + return file.download(passThroughOptionsCopy); + })); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = (0, p_limit_1.default)(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. 
+ * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. 
+ for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} +exports.TransferManager = TransferManager; + + +/***/ }), + +/***/ 4557: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; + } + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
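The resume path that `uploadFileInChunks` documents (re-passing `uploadId` and `partsMap`) can be driven from the `MultiPartUploadError` thrown on failure, which carries both values. A minimal sketch under the assumption that automatic aborting is disabled so the MPU session stays resumable; the error is duck-typed rather than imported to keep the example self-contained:

```js
// Illustrative only: resuming a chunked upload from the parts that already
// succeeded, using the uploadId/partsMap carried by the thrown error.
const {Storage, TransferManager} = require('@google-cloud/storage');

const transferManager = new TransferManager(new Storage().bucket('my-bucket'));

async function uploadWithResume(filePath) {
  try {
    await transferManager.uploadFileInChunks(filePath, {
      autoAbortFailure: false, // keep the MPU session alive on failure
    });
  } catch (err) {
    if (!err.uploadId || !err.partsMap) throw err; // not a resumable failure
    // Retry, skipping the chunks already recorded in partsMap.
    await transferManager.uploadFileInChunks(filePath, {
      autoAbortFailure: false,
      uploadId: err.uploadId,
      partsMap: err.partsMap,
    });
  }
}
```

Setting `autoAbortFailure: false` matters here: with the default behaviour the helper aborts the session on failure, after which the `uploadId` can no longer be reused.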
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function () { + var ownKeys = function (o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +}(); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.PassThroughShim = void 0; +exports.normalize = normalize; +exports.objectEntries = objectEntries; +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +exports.encodeURI = encodeURI; +exports.qsStringify = qsStringify; +exports.objectKeyToLowercase = objectKeyToLowercase; +exports.unicodeJSONStringify = unicodeJSONStringify; +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +exports.formatAsUTCISO = formatAsUTCISO; +exports.getRuntimeTrackingString = getRuntimeTrackingString; +exports.getUserAgentString = getUserAgentString; +exports.getDirName = getDirName; +exports.getModuleFormat = getModuleFormat; +const path = __importStar(__nccwpck_require__(6928)); +const querystring = __importStar(__nccwpck_require__(3480)); +const stream_1 = __nccwpck_require__(2203); +const url = __importStar(__nccwpck_require__(7016)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; +} +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? 
'%2F' : '/'); +} +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); +} +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. 
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; + } else { + return `gl-node/${process.versions.node}`; + } +} +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. + */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; +} +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; + } + return dirToUse; +} +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; +} +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. 
+ if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} +exports.PassThroughShim = PassThroughShim; + + +/***/ }), + +/***/ 3660: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encode = encode; +exports.decodeEntity = decodeEntity; +exports.decode = decode; +var named_references_js_1 = __nccwpck_require__(6000); +var numeric_unicode_map_js_1 = __nccwpck_require__(1057); +var surrogate_pairs_js_1 = __nccwpck_require__(9718); +var allNamedReferences = __assign(__assign({}, named_references_js_1.namedReferences), { all: named_references_js_1.namedReferences.html5 }); +var encodeRegExps = { + specialChars: /[<>'"&]/g, + nonAscii: /[<>'"&\u0080-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintable: /[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintableOnly: /[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + extensive: /[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g +}; +var defaultEncodeOptions = { + mode: 'specialChars', + level: 'all', + numeric: 'decimal' +}; +/** Encodes all the necessary (specified by `level`) characters in the text */ +function encode(text, _a) { + var _b = _a === void 0 ? defaultEncodeOptions : _a, _c = _b.mode, mode = _c === void 0 ? 'specialChars' : _c, _d = _b.numeric, numeric = _d === void 0 ? 'decimal' : _d, _e = _b.level, level = _e === void 0 ? 'all' : _e; + if (!text) { + return ''; + } + var encodeRegExp = encodeRegExps[mode]; + var references = allNamedReferences[level].characters; + var isHex = numeric === 'hexadecimal'; + return String.prototype.replace.call(text, encodeRegExp, function (input) { + var result = references[input]; + if (!result) { + var code = input.length > 1 ? (0, surrogate_pairs_js_1.getCodePoint)(input, 0) : input.charCodeAt(0); + result = (isHex ? 
'&#x' + code.toString(16) : '&#' + code) + ';'; + } + return result; + }); +} +var defaultDecodeOptions = { + scope: 'body', + level: 'all' +}; +var strict = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g; +var attribute = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g; +var baseDecodeRegExps = { + xml: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.xml + }, + html4: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html4 + }, + html5: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html5 + } +}; +var decodeRegExps = __assign(__assign({}, baseDecodeRegExps), { all: baseDecodeRegExps.html5 }); +var fromCharCode = String.fromCharCode; +var outOfBoundsChar = fromCharCode(65533); +var defaultDecodeEntityOptions = { + level: 'all' +}; +function getDecodedEntity(entity, references, isAttribute, isStrict) { + var decodeResult = entity; + var decodeEntityLastChar = entity[entity.length - 1]; + if (isAttribute && decodeEntityLastChar === '=') { + decodeResult = entity; + } + else if (isStrict && decodeEntityLastChar !== ';') { + decodeResult = entity; + } + else { + var decodeResultByReference = references[entity]; + if (decodeResultByReference) { + decodeResult = decodeResultByReference; + } + else if (entity[0] === '&' && entity[1] === '#') { + var decodeSecondChar = entity[2]; + var decodeCode = decodeSecondChar == 'x' || decodeSecondChar == 'X' + ? parseInt(entity.substr(3), 16) + : parseInt(entity.substr(2)); + decodeResult = + decodeCode >= 0x10ffff + ? outOfBoundsChar + : decodeCode > 65535 + ? (0, surrogate_pairs_js_1.fromCodePoint)(decodeCode) + : fromCharCode(numeric_unicode_map_js_1.numericUnicodeMap[decodeCode] || decodeCode); + } + } + return decodeResult; +} +/** Decodes a single entity */ +function decodeEntity(entity, _a) { + var _b = _a === void 0 ? defaultDecodeEntityOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c; + if (!entity) { + return ''; + } + return getDecodedEntity(entity, allNamedReferences[level].entities, false, false); +} +/** Decodes all entities in the text */ +function decode(text, _a) { + var _b = _a === void 0 ? defaultDecodeOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c, _d = _b.scope, scope = _d === void 0 ? level === 'xml' ? 
'strict' : 'body' : _d; + if (!text) { + return ''; + } + var decodeRegExp = decodeRegExps[level][scope]; + var references = allNamedReferences[level].entities; + var isAttribute = scope === 'attribute'; + var isStrict = scope === 'strict'; + return text.replace(decodeRegExp, function (entity) { return getDecodedEntity(entity, references, isAttribute, isStrict); }); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6000: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.namedReferences = exports.bodyRegExps = void 0; +// This file is autogenerated by tools/process-named-references.ts +var pairDivider = "~"; +var blockDivider = "~~"; +function generateNamedReferences(input, prev) { + var entities = {}; + var characters = {}; + var blocks = input.split(blockDivider); + var isOptionalBlock = false; + for (var i = 0; blocks.length > i; i++) { + var entries = blocks[i].split(pairDivider); + for (var j = 0; j < entries.length; j += 2) { + var entity = entries[j]; + var character = entries[j + 1]; + var fullEntity = '&' + entity + ';'; + entities[fullEntity] = character; + if (isOptionalBlock) { + entities['&' + entity] = character; + } + characters[character] = fullEntity; + } + isOptionalBlock = true; + } + return prev ? + { entities: __assign(__assign({}, entities), prev.entities), characters: __assign(__assign({}, characters), prev.characters) } : + { entities: entities, characters: characters }; +} +exports.bodyRegExps = { + xml: /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html4: /∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html5: /·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g +}; +exports.namedReferences = {}; +exports.namedReferences.xml = generateNamedReferences("lt~<~gt~>~quot~\"~apos~'~amp~&"); +exports.namedReferences.html4 = 
generateNamedReferences("apos~'~OElig~Œ~oelig~œ~Scaron~Š~scaron~š~Yuml~Ÿ~circ~ˆ~tilde~˜~ensp~ ~emsp~ ~thinsp~ ~zwnj~‌~zwj~‍~lrm~‎~rlm~‏~ndash~–~mdash~—~lsquo~‘~rsquo~’~sbquo~‚~ldquo~“~rdquo~”~bdquo~„~dagger~†~Dagger~‡~permil~‰~lsaquo~‹~rsaquo~›~euro~€~fnof~ƒ~Alpha~Α~Beta~Β~Gamma~Γ~Delta~Δ~Epsilon~Ε~Zeta~Ζ~Eta~Η~Theta~Θ~Iota~Ι~Kappa~Κ~Lambda~Λ~Mu~Μ~Nu~Ν~Xi~Ξ~Omicron~Ο~Pi~Π~Rho~Ρ~Sigma~Σ~Tau~Τ~Upsilon~Υ~Phi~Φ~Chi~Χ~Psi~Ψ~Omega~Ω~alpha~α~beta~β~gamma~γ~delta~δ~epsilon~ε~zeta~ζ~eta~η~theta~θ~iota~ι~kappa~κ~lambda~λ~mu~μ~nu~ν~xi~ξ~omicron~ο~pi~π~rho~ρ~sigmaf~ς~sigma~σ~tau~τ~upsilon~υ~phi~φ~chi~χ~psi~ψ~omega~ω~thetasym~ϑ~upsih~ϒ~piv~ϖ~bull~•~hellip~…~prime~′~Prime~″~oline~‾~frasl~⁄~weierp~℘~image~ℑ~real~ℜ~trade~™~alefsym~ℵ~larr~←~uarr~↑~rarr~→~darr~↓~harr~↔~crarr~↵~lArr~⇐~uArr~⇑~rArr~⇒~dArr~⇓~hArr~⇔~forall~∀~part~∂~exist~∃~empty~∅~nabla~∇~isin~∈~notin~∉~ni~∋~prod~∏~sum~∑~minus~−~lowast~∗~radic~√~prop~∝~infin~∞~ang~∠~and~∧~or~∨~cap~∩~cup~∪~int~∫~there4~∴~sim~∼~cong~≅~asymp~≈~ne~≠~equiv~≡~le~≤~ge~≥~sub~⊂~sup~⊃~nsub~⊄~sube~⊆~supe~⊇~oplus~⊕~otimes~⊗~perp~⊥~sdot~⋅~lceil~⌈~rceil~⌉~lfloor~⌊~rfloor~⌋~lang~〈~rang~〉~loz~◊~spades~♠~clubs~♣~hearts~♥~diams~♦~~nbsp~ ~iexcl~¡~cent~¢~pound~£~curren~¤~yen~¥~brvbar~¦~sect~§~uml~¨~copy~©~ordf~ª~laquo~«~not~¬~shy~­~reg~®~macr~¯~deg~°~plusmn~±~sup2~²~sup3~³~acute~´~micro~µ~para~¶~middot~·~cedil~¸~sup1~¹~ordm~º~raquo~»~frac14~¼~frac12~½~frac34~¾~iquest~¿~Agrave~À~Aacute~Á~Acirc~Â~Atilde~Ã~Auml~Ä~Aring~Å~AElig~Æ~Ccedil~Ç~Egrave~È~Eacute~É~Ecirc~Ê~Euml~Ë~Igrave~Ì~Iacute~Í~Icirc~Î~Iuml~Ï~ETH~Ð~Ntilde~Ñ~Ograve~Ò~Oacute~Ó~Ocirc~Ô~Otilde~Õ~Ouml~Ö~times~×~Oslash~Ø~Ugrave~Ù~Uacute~Ú~Ucirc~Û~Uuml~Ü~Yacute~Ý~THORN~Þ~szlig~ß~agrave~à~aacute~á~acirc~â~atilde~ã~auml~ä~aring~å~aelig~æ~ccedil~ç~egrave~è~eacute~é~ecirc~ê~euml~ë~igrave~ì~iacute~í~icirc~î~iuml~ï~eth~ð~ntilde~ñ~ograve~ò~oacute~ó~ocirc~ô~otilde~õ~ouml~ö~divide~÷~oslash~ø~ugrave~ù~uacute~ú~ucirc~û~uuml~ü~yacute~ý~thorn~þ~yuml~ÿ~quot~\"~amp~&~lt~<~gt~>"); +exports.namedReferences.html5 = 
generateNamedReferences("Abreve~Ă~Acy~А~Afr~𝔄~Amacr~Ā~And~⩓~Aogon~Ą~Aopf~𝔸~ApplyFunction~⁡~Ascr~𝒜~Assign~≔~Backslash~∖~Barv~⫧~Barwed~⌆~Bcy~Б~Because~∵~Bernoullis~ℬ~Bfr~𝔅~Bopf~𝔹~Breve~˘~Bscr~ℬ~Bumpeq~≎~CHcy~Ч~Cacute~Ć~Cap~⋒~CapitalDifferentialD~ⅅ~Cayleys~ℭ~Ccaron~Č~Ccirc~Ĉ~Cconint~∰~Cdot~Ċ~Cedilla~¸~CenterDot~·~Cfr~ℭ~CircleDot~⊙~CircleMinus~⊖~CirclePlus~⊕~CircleTimes~⊗~ClockwiseContourIntegral~∲~CloseCurlyDoubleQuote~”~CloseCurlyQuote~’~Colon~∷~Colone~⩴~Congruent~≡~Conint~∯~ContourIntegral~∮~Copf~ℂ~Coproduct~∐~CounterClockwiseContourIntegral~∳~Cross~⨯~Cscr~𝒞~Cup~⋓~CupCap~≍~DD~ⅅ~DDotrahd~⤑~DJcy~Ђ~DScy~Ѕ~DZcy~Џ~Darr~↡~Dashv~⫤~Dcaron~Ď~Dcy~Д~Del~∇~Dfr~𝔇~DiacriticalAcute~´~DiacriticalDot~˙~DiacriticalDoubleAcute~˝~DiacriticalGrave~`~DiacriticalTilde~˜~Diamond~⋄~DifferentialD~ⅆ~Dopf~𝔻~Dot~¨~DotDot~⃜~DotEqual~≐~DoubleContourIntegral~∯~DoubleDot~¨~DoubleDownArrow~⇓~DoubleLeftArrow~⇐~DoubleLeftRightArrow~⇔~DoubleLeftTee~⫤~DoubleLongLeftArrow~⟸~DoubleLongLeftRightArrow~⟺~DoubleLongRightArrow~⟹~DoubleRightArrow~⇒~DoubleRightTee~⊨~DoubleUpArrow~⇑~DoubleUpDownArrow~⇕~DoubleVerticalBar~∥~DownArrow~↓~DownArrowBar~⤓~DownArrowUpArrow~⇵~DownBreve~̑~DownLeftRightVector~⥐~DownLeftTeeVector~⥞~DownLeftVector~↽~DownLeftVectorBar~⥖~DownRightTeeVector~⥟~DownRightVector~⇁~DownRightVectorBar~⥗~DownTee~⊤~DownTeeArrow~↧~Downarrow~⇓~Dscr~𝒟~Dstrok~Đ~ENG~Ŋ~Ecaron~Ě~Ecy~Э~Edot~Ė~Efr~𝔈~Element~∈~Emacr~Ē~EmptySmallSquare~◻~EmptyVerySmallSquare~▫~Eogon~Ę~Eopf~𝔼~Equal~⩵~EqualTilde~≂~Equilibrium~⇌~Escr~ℰ~Esim~⩳~Exists~∃~ExponentialE~ⅇ~Fcy~Ф~Ffr~𝔉~FilledSmallSquare~◼~FilledVerySmallSquare~▪~Fopf~𝔽~ForAll~∀~Fouriertrf~ℱ~Fscr~ℱ~GJcy~Ѓ~Gammad~Ϝ~Gbreve~Ğ~Gcedil~Ģ~Gcirc~Ĝ~Gcy~Г~Gdot~Ġ~Gfr~𝔊~Gg~⋙~Gopf~𝔾~GreaterEqual~≥~GreaterEqualLess~⋛~GreaterFullEqual~≧~GreaterGreater~⪢~GreaterLess~≷~GreaterSlantEqual~⩾~GreaterTilde~≳~Gscr~𝒢~Gt~≫~HARDcy~Ъ~Hacek~ˇ~Hat~^~Hcirc~Ĥ~Hfr~ℌ~HilbertSpace~ℋ~Hopf~ℍ~HorizontalLine~─~Hscr~ℋ~Hstrok~Ħ~HumpDownHump~≎~HumpEqual~≏~IEcy~Е~IJlig~IJ~IOcy~Ё~Icy~И~Idot~İ~Ifr~ℑ~Im~ℑ~Imacr~Ī~ImaginaryI~ⅈ~Implies~⇒~Int~∬~Integral~∫~Intersection~⋂~InvisibleComma~⁣~InvisibleTimes~⁢~Iogon~Į~Iopf~𝕀~Iscr~ℐ~Itilde~Ĩ~Iukcy~І~Jcirc~Ĵ~Jcy~Й~Jfr~𝔍~Jopf~𝕁~Jscr~𝒥~Jsercy~Ј~Jukcy~Є~KHcy~Х~KJcy~Ќ~Kcedil~Ķ~Kcy~К~Kfr~𝔎~Kopf~𝕂~Kscr~𝒦~LJcy~Љ~Lacute~Ĺ~Lang~⟪~Laplacetrf~ℒ~Larr~↞~Lcaron~Ľ~Lcedil~Ļ~Lcy~Л~LeftAngleBracket~⟨~LeftArrow~←~LeftArrowBar~⇤~LeftArrowRightArrow~⇆~LeftCeiling~⌈~LeftDoubleBracket~⟦~LeftDownTeeVector~⥡~LeftDownVector~⇃~LeftDownVectorBar~⥙~LeftFloor~⌊~LeftRightArrow~↔~LeftRightVector~⥎~LeftTee~⊣~LeftTeeArrow~↤~LeftTeeVector~⥚~LeftTriangle~⊲~LeftTriangleBar~⧏~LeftTriangleEqual~⊴~LeftUpDownVector~⥑~LeftUpTeeVector~⥠~LeftUpVector~↿~LeftUpVectorBar~⥘~LeftVector~↼~LeftVectorBar~⥒~Leftarrow~⇐~Leftrightarrow~⇔~LessEqualGreater~⋚~LessFullEqual~≦~LessGreater~≶~LessLess~⪡~LessSlantEqual~⩽~LessTilde~≲~Lfr~𝔏~Ll~⋘~Lleftarrow~⇚~Lmidot~Ŀ~LongLeftArrow~⟵~LongLeftRightArrow~⟷~LongRightArrow~⟶~Longleftarrow~⟸~Longleftrightarrow~⟺~Longrightarrow~⟹~Lopf~𝕃~LowerLeftArrow~↙~LowerRightArrow~↘~Lscr~ℒ~Lsh~↰~Lstrok~Ł~Lt~≪~Map~⤅~Mcy~М~MediumSpace~ ~Mellintrf~ℳ~Mfr~𝔐~MinusPlus~∓~Mopf~𝕄~Mscr~ℳ~NJcy~Њ~Nacute~Ń~Ncaron~Ň~Ncedil~Ņ~Ncy~Н~NegativeMediumSpace~​~NegativeThickSpace~​~NegativeThinSpace~​~NegativeVeryThinSpace~​~NestedGreaterGreater~≫~NestedLessLess~≪~NewLine~\n~Nfr~𝔑~NoBreak~⁠~NonBreakingSpace~ 
~Nopf~ℕ~Not~⫬~NotCongruent~≢~NotCupCap~≭~NotDoubleVerticalBar~∦~NotElement~∉~NotEqual~≠~NotEqualTilde~≂̸~NotExists~∄~NotGreater~≯~NotGreaterEqual~≱~NotGreaterFullEqual~≧̸~NotGreaterGreater~≫̸~NotGreaterLess~≹~NotGreaterSlantEqual~⩾̸~NotGreaterTilde~≵~NotHumpDownHump~≎̸~NotHumpEqual~≏̸~NotLeftTriangle~⋪~NotLeftTriangleBar~⧏̸~NotLeftTriangleEqual~⋬~NotLess~≮~NotLessEqual~≰~NotLessGreater~≸~NotLessLess~≪̸~NotLessSlantEqual~⩽̸~NotLessTilde~≴~NotNestedGreaterGreater~⪢̸~NotNestedLessLess~⪡̸~NotPrecedes~⊀~NotPrecedesEqual~⪯̸~NotPrecedesSlantEqual~⋠~NotReverseElement~∌~NotRightTriangle~⋫~NotRightTriangleBar~⧐̸~NotRightTriangleEqual~⋭~NotSquareSubset~⊏̸~NotSquareSubsetEqual~⋢~NotSquareSuperset~⊐̸~NotSquareSupersetEqual~⋣~NotSubset~⊂⃒~NotSubsetEqual~⊈~NotSucceeds~⊁~NotSucceedsEqual~⪰̸~NotSucceedsSlantEqual~⋡~NotSucceedsTilde~≿̸~NotSuperset~⊃⃒~NotSupersetEqual~⊉~NotTilde~≁~NotTildeEqual~≄~NotTildeFullEqual~≇~NotTildeTilde~≉~NotVerticalBar~∤~Nscr~𝒩~Ocy~О~Odblac~Ő~Ofr~𝔒~Omacr~Ō~Oopf~𝕆~OpenCurlyDoubleQuote~“~OpenCurlyQuote~‘~Or~⩔~Oscr~𝒪~Otimes~⨷~OverBar~‾~OverBrace~⏞~OverBracket~⎴~OverParenthesis~⏜~PartialD~∂~Pcy~П~Pfr~𝔓~PlusMinus~±~Poincareplane~ℌ~Popf~ℙ~Pr~⪻~Precedes~≺~PrecedesEqual~⪯~PrecedesSlantEqual~≼~PrecedesTilde~≾~Product~∏~Proportion~∷~Proportional~∝~Pscr~𝒫~Qfr~𝔔~Qopf~ℚ~Qscr~𝒬~RBarr~⤐~Racute~Ŕ~Rang~⟫~Rarr~↠~Rarrtl~⤖~Rcaron~Ř~Rcedil~Ŗ~Rcy~Р~Re~ℜ~ReverseElement~∋~ReverseEquilibrium~⇋~ReverseUpEquilibrium~⥯~Rfr~ℜ~RightAngleBracket~⟩~RightArrow~→~RightArrowBar~⇥~RightArrowLeftArrow~⇄~RightCeiling~⌉~RightDoubleBracket~⟧~RightDownTeeVector~⥝~RightDownVector~⇂~RightDownVectorBar~⥕~RightFloor~⌋~RightTee~⊢~RightTeeArrow~↦~RightTeeVector~⥛~RightTriangle~⊳~RightTriangleBar~⧐~RightTriangleEqual~⊵~RightUpDownVector~⥏~RightUpTeeVector~⥜~RightUpVector~↾~RightUpVectorBar~⥔~RightVector~⇀~RightVectorBar~⥓~Rightarrow~⇒~Ropf~ℝ~RoundImplies~⥰~Rrightarrow~⇛~Rscr~ℛ~Rsh~↱~RuleDelayed~⧴~SHCHcy~Щ~SHcy~Ш~SOFTcy~Ь~Sacute~Ś~Sc~⪼~Scedil~Ş~Scirc~Ŝ~Scy~С~Sfr~𝔖~ShortDownArrow~↓~ShortLeftArrow~←~ShortRightArrow~→~ShortUpArrow~↑~SmallCircle~∘~Sopf~𝕊~Sqrt~√~Square~□~SquareIntersection~⊓~SquareSubset~⊏~SquareSubsetEqual~⊑~SquareSuperset~⊐~SquareSupersetEqual~⊒~SquareUnion~⊔~Sscr~𝒮~Star~⋆~Sub~⋐~Subset~⋐~SubsetEqual~⊆~Succeeds~≻~SucceedsEqual~⪰~SucceedsSlantEqual~≽~SucceedsTilde~≿~SuchThat~∋~Sum~∑~Sup~⋑~Superset~⊃~SupersetEqual~⊇~Supset~⋑~TRADE~™~TSHcy~Ћ~TScy~Ц~Tab~\t~Tcaron~Ť~Tcedil~Ţ~Tcy~Т~Tfr~𝔗~Therefore~∴~ThickSpace~  ~ThinSpace~ ~Tilde~∼~TildeEqual~≃~TildeFullEqual~≅~TildeTilde~≈~Topf~𝕋~TripleDot~⃛~Tscr~𝒯~Tstrok~Ŧ~Uarr~↟~Uarrocir~⥉~Ubrcy~Ў~Ubreve~Ŭ~Ucy~У~Udblac~Ű~Ufr~𝔘~Umacr~Ū~UnderBar~_~UnderBrace~⏟~UnderBracket~⎵~UnderParenthesis~⏝~Union~⋃~UnionPlus~⊎~Uogon~Ų~Uopf~𝕌~UpArrow~↑~UpArrowBar~⤒~UpArrowDownArrow~⇅~UpDownArrow~↕~UpEquilibrium~⥮~UpTee~⊥~UpTeeArrow~↥~Uparrow~⇑~Updownarrow~⇕~UpperLeftArrow~↖~UpperRightArrow~↗~Upsi~ϒ~Uring~Ů~Uscr~𝒰~Utilde~Ũ~VDash~⊫~Vbar~⫫~Vcy~В~Vdash~⊩~Vdashl~⫦~Vee~⋁~Verbar~‖~Vert~‖~VerticalBar~∣~VerticalLine~|~VerticalSeparator~❘~VerticalTilde~≀~VeryThinSpace~ 
~Vfr~𝔙~Vopf~𝕍~Vscr~𝒱~Vvdash~⊪~Wcirc~Ŵ~Wedge~⋀~Wfr~𝔚~Wopf~𝕎~Wscr~𝒲~Xfr~𝔛~Xopf~𝕏~Xscr~𝒳~YAcy~Я~YIcy~Ї~YUcy~Ю~Ycirc~Ŷ~Ycy~Ы~Yfr~𝔜~Yopf~𝕐~Yscr~𝒴~ZHcy~Ж~Zacute~Ź~Zcaron~Ž~Zcy~З~Zdot~Ż~ZeroWidthSpace~​~Zfr~ℨ~Zopf~ℤ~Zscr~𝒵~abreve~ă~ac~∾~acE~∾̳~acd~∿~acy~а~af~⁡~afr~𝔞~aleph~ℵ~amacr~ā~amalg~⨿~andand~⩕~andd~⩜~andslope~⩘~andv~⩚~ange~⦤~angle~∠~angmsd~∡~angmsdaa~⦨~angmsdab~⦩~angmsdac~⦪~angmsdad~⦫~angmsdae~⦬~angmsdaf~⦭~angmsdag~⦮~angmsdah~⦯~angrt~∟~angrtvb~⊾~angrtvbd~⦝~angsph~∢~angst~Å~angzarr~⍼~aogon~ą~aopf~𝕒~ap~≈~apE~⩰~apacir~⩯~ape~≊~apid~≋~approx~≈~approxeq~≊~ascr~𝒶~ast~*~asympeq~≍~awconint~∳~awint~⨑~bNot~⫭~backcong~≌~backepsilon~϶~backprime~‵~backsim~∽~backsimeq~⋍~barvee~⊽~barwed~⌅~barwedge~⌅~bbrk~⎵~bbrktbrk~⎶~bcong~≌~bcy~б~becaus~∵~because~∵~bemptyv~⦰~bepsi~϶~bernou~ℬ~beth~ℶ~between~≬~bfr~𝔟~bigcap~⋂~bigcirc~◯~bigcup~⋃~bigodot~⨀~bigoplus~⨁~bigotimes~⨂~bigsqcup~⨆~bigstar~★~bigtriangledown~▽~bigtriangleup~△~biguplus~⨄~bigvee~⋁~bigwedge~⋀~bkarow~⤍~blacklozenge~⧫~blacksquare~▪~blacktriangle~▴~blacktriangledown~▾~blacktriangleleft~◂~blacktriangleright~▸~blank~␣~blk12~▒~blk14~░~blk34~▓~block~█~bne~=⃥~bnequiv~≡⃥~bnot~⌐~bopf~𝕓~bot~⊥~bottom~⊥~bowtie~⋈~boxDL~╗~boxDR~╔~boxDl~╖~boxDr~╓~boxH~═~boxHD~╦~boxHU~╩~boxHd~╤~boxHu~╧~boxUL~╝~boxUR~╚~boxUl~╜~boxUr~╙~boxV~║~boxVH~╬~boxVL~╣~boxVR~╠~boxVh~╫~boxVl~╢~boxVr~╟~boxbox~⧉~boxdL~╕~boxdR~╒~boxdl~┐~boxdr~┌~boxh~─~boxhD~╥~boxhU~╨~boxhd~┬~boxhu~┴~boxminus~⊟~boxplus~⊞~boxtimes~⊠~boxuL~╛~boxuR~╘~boxul~┘~boxur~└~boxv~│~boxvH~╪~boxvL~╡~boxvR~╞~boxvh~┼~boxvl~┤~boxvr~├~bprime~‵~breve~˘~bscr~𝒷~bsemi~⁏~bsim~∽~bsime~⋍~bsol~\\~bsolb~⧅~bsolhsub~⟈~bullet~•~bump~≎~bumpE~⪮~bumpe~≏~bumpeq~≏~cacute~ć~capand~⩄~capbrcup~⩉~capcap~⩋~capcup~⩇~capdot~⩀~caps~∩︀~caret~⁁~caron~ˇ~ccaps~⩍~ccaron~č~ccirc~ĉ~ccups~⩌~ccupssm~⩐~cdot~ċ~cemptyv~⦲~centerdot~·~cfr~𝔠~chcy~ч~check~✓~checkmark~✓~cir~○~cirE~⧃~circeq~≗~circlearrowleft~↺~circlearrowright~↻~circledR~®~circledS~Ⓢ~circledast~⊛~circledcirc~⊚~circleddash~⊝~cire~≗~cirfnint~⨐~cirmid~⫯~cirscir~⧂~clubsuit~♣~colon~:~colone~≔~coloneq~≔~comma~,~commat~@~comp~∁~compfn~∘~complement~∁~complexes~ℂ~congdot~⩭~conint~∮~copf~𝕔~coprod~∐~copysr~℗~cross~✗~cscr~𝒸~csub~⫏~csube~⫑~csup~⫐~csupe~⫒~ctdot~⋯~cudarrl~⤸~cudarrr~⤵~cuepr~⋞~cuesc~⋟~cularr~↶~cularrp~⤽~cupbrcap~⩈~cupcap~⩆~cupcup~⩊~cupdot~⊍~cupor~⩅~cups~∪︀~curarr~↷~curarrm~⤼~curlyeqprec~⋞~curlyeqsucc~⋟~curlyvee~⋎~curlywedge~⋏~curvearrowleft~↶~curvearrowright~↷~cuvee~⋎~cuwed~⋏~cwconint~∲~cwint~∱~cylcty~⌭~dHar~⥥~daleth~ℸ~dash~‐~dashv~⊣~dbkarow~⤏~dblac~˝~dcaron~ď~dcy~д~dd~ⅆ~ddagger~‡~ddarr~⇊~ddotseq~⩷~demptyv~⦱~dfisht~⥿~dfr~𝔡~dharl~⇃~dharr~⇂~diam~⋄~diamond~⋄~diamondsuit~♦~die~¨~digamma~ϝ~disin~⋲~div~÷~divideontimes~⋇~divonx~⋇~djcy~ђ~dlcorn~⌞~dlcrop~⌍~dollar~$~dopf~𝕕~dot~˙~doteq~≐~doteqdot~≑~dotminus~∸~dotplus~∔~dotsquare~⊡~doublebarwedge~⌆~downarrow~↓~downdownarrows~⇊~downharpoonleft~⇃~downharpoonright~⇂~drbkarow~⤐~drcorn~⌟~drcrop~⌌~dscr~𝒹~dscy~ѕ~dsol~⧶~dstrok~đ~dtdot~⋱~dtri~▿~dtrif~▾~duarr~⇵~duhar~⥯~dwangle~⦦~dzcy~џ~dzigrarr~⟿~eDDot~⩷~eDot~≑~easter~⩮~ecaron~ě~ecir~≖~ecolon~≕~ecy~э~edot~ė~ee~ⅇ~efDot~≒~efr~𝔢~eg~⪚~egs~⪖~egsdot~⪘~el~⪙~elinters~⏧~ell~ℓ~els~⪕~elsdot~⪗~emacr~ē~emptyset~∅~emptyv~∅~emsp13~ ~emsp14~ 
~eng~ŋ~eogon~ę~eopf~𝕖~epar~⋕~eparsl~⧣~eplus~⩱~epsi~ε~epsiv~ϵ~eqcirc~≖~eqcolon~≕~eqsim~≂~eqslantgtr~⪖~eqslantless~⪕~equals~=~equest~≟~equivDD~⩸~eqvparsl~⧥~erDot~≓~erarr~⥱~escr~ℯ~esdot~≐~esim~≂~excl~!~expectation~ℰ~exponentiale~ⅇ~fallingdotseq~≒~fcy~ф~female~♀~ffilig~ffi~fflig~ff~ffllig~ffl~ffr~𝔣~filig~fi~fjlig~fj~flat~♭~fllig~fl~fltns~▱~fopf~𝕗~fork~⋔~forkv~⫙~fpartint~⨍~frac13~⅓~frac15~⅕~frac16~⅙~frac18~⅛~frac23~⅔~frac25~⅖~frac35~⅗~frac38~⅜~frac45~⅘~frac56~⅚~frac58~⅝~frac78~⅞~frown~⌢~fscr~𝒻~gE~≧~gEl~⪌~gacute~ǵ~gammad~ϝ~gap~⪆~gbreve~ğ~gcirc~ĝ~gcy~г~gdot~ġ~gel~⋛~geq~≥~geqq~≧~geqslant~⩾~ges~⩾~gescc~⪩~gesdot~⪀~gesdoto~⪂~gesdotol~⪄~gesl~⋛︀~gesles~⪔~gfr~𝔤~gg~≫~ggg~⋙~gimel~ℷ~gjcy~ѓ~gl~≷~glE~⪒~gla~⪥~glj~⪤~gnE~≩~gnap~⪊~gnapprox~⪊~gne~⪈~gneq~⪈~gneqq~≩~gnsim~⋧~gopf~𝕘~grave~`~gscr~ℊ~gsim~≳~gsime~⪎~gsiml~⪐~gtcc~⪧~gtcir~⩺~gtdot~⋗~gtlPar~⦕~gtquest~⩼~gtrapprox~⪆~gtrarr~⥸~gtrdot~⋗~gtreqless~⋛~gtreqqless~⪌~gtrless~≷~gtrsim~≳~gvertneqq~≩︀~gvnE~≩︀~hairsp~ ~half~½~hamilt~ℋ~hardcy~ъ~harrcir~⥈~harrw~↭~hbar~ℏ~hcirc~ĥ~heartsuit~♥~hercon~⊹~hfr~𝔥~hksearow~⤥~hkswarow~⤦~hoarr~⇿~homtht~∻~hookleftarrow~↩~hookrightarrow~↪~hopf~𝕙~horbar~―~hscr~𝒽~hslash~ℏ~hstrok~ħ~hybull~⁃~hyphen~‐~ic~⁣~icy~и~iecy~е~iff~⇔~ifr~𝔦~ii~ⅈ~iiiint~⨌~iiint~∭~iinfin~⧜~iiota~℩~ijlig~ij~imacr~ī~imagline~ℐ~imagpart~ℑ~imath~ı~imof~⊷~imped~Ƶ~in~∈~incare~℅~infintie~⧝~inodot~ı~intcal~⊺~integers~ℤ~intercal~⊺~intlarhk~⨗~intprod~⨼~iocy~ё~iogon~į~iopf~𝕚~iprod~⨼~iscr~𝒾~isinE~⋹~isindot~⋵~isins~⋴~isinsv~⋳~isinv~∈~it~⁢~itilde~ĩ~iukcy~і~jcirc~ĵ~jcy~й~jfr~𝔧~jmath~ȷ~jopf~𝕛~jscr~𝒿~jsercy~ј~jukcy~є~kappav~ϰ~kcedil~ķ~kcy~к~kfr~𝔨~kgreen~ĸ~khcy~х~kjcy~ќ~kopf~𝕜~kscr~𝓀~lAarr~⇚~lAtail~⤛~lBarr~⤎~lE~≦~lEg~⪋~lHar~⥢~lacute~ĺ~laemptyv~⦴~lagran~ℒ~langd~⦑~langle~⟨~lap~⪅~larrb~⇤~larrbfs~⤟~larrfs~⤝~larrhk~↩~larrlp~↫~larrpl~⤹~larrsim~⥳~larrtl~↢~lat~⪫~latail~⤙~late~⪭~lates~⪭︀~lbarr~⤌~lbbrk~❲~lbrace~{~lbrack~[~lbrke~⦋~lbrksld~⦏~lbrkslu~⦍~lcaron~ľ~lcedil~ļ~lcub~{~lcy~л~ldca~⤶~ldquor~„~ldrdhar~⥧~ldrushar~⥋~ldsh~↲~leftarrow~←~leftarrowtail~↢~leftharpoondown~↽~leftharpoonup~↼~leftleftarrows~⇇~leftrightarrow~↔~leftrightarrows~⇆~leftrightharpoons~⇋~leftrightsquigarrow~↭~leftthreetimes~⋋~leg~⋚~leq~≤~leqq~≦~leqslant~⩽~les~⩽~lescc~⪨~lesdot~⩿~lesdoto~⪁~lesdotor~⪃~lesg~⋚︀~lesges~⪓~lessapprox~⪅~lessdot~⋖~lesseqgtr~⋚~lesseqqgtr~⪋~lessgtr~≶~lesssim~≲~lfisht~⥼~lfr~𝔩~lg~≶~lgE~⪑~lhard~↽~lharu~↼~lharul~⥪~lhblk~▄~ljcy~љ~ll~≪~llarr~⇇~llcorner~⌞~llhard~⥫~lltri~◺~lmidot~ŀ~lmoust~⎰~lmoustache~⎰~lnE~≨~lnap~⪉~lnapprox~⪉~lne~⪇~lneq~⪇~lneqq~≨~lnsim~⋦~loang~⟬~loarr~⇽~lobrk~⟦~longleftarrow~⟵~longleftrightarrow~⟷~longmapsto~⟼~longrightarrow~⟶~looparrowleft~↫~looparrowright~↬~lopar~⦅~lopf~𝕝~loplus~⨭~lotimes~⨴~lowbar~_~lozenge~◊~lozf~⧫~lpar~(~lparlt~⦓~lrarr~⇆~lrcorner~⌟~lrhar~⇋~lrhard~⥭~lrtri~⊿~lscr~𝓁~lsh~↰~lsim~≲~lsime~⪍~lsimg~⪏~lsqb~[~lsquor~‚~lstrok~ł~ltcc~⪦~ltcir~⩹~ltdot~⋖~lthree~⋋~ltimes~⋉~ltlarr~⥶~ltquest~⩻~ltrPar~⦖~ltri~◃~ltrie~⊴~ltrif~◂~lurdshar~⥊~luruhar~⥦~lvertneqq~≨︀~lvnE~≨︀~mDDot~∺~male~♂~malt~✠~maltese~✠~map~↦~mapsto~↦~mapstodown~↧~mapstoleft~↤~mapstoup~↥~marker~▮~mcomma~⨩~mcy~м~measuredangle~∡~mfr~𝔪~mho~℧~mid~∣~midast~*~midcir~⫰~minusb~⊟~minusd~∸~minusdu~⨪~mlcp~⫛~mldr~…~mnplus~∓~models~⊧~mopf~𝕞~mp~∓~mscr~𝓂~mstpos~∾~multimap~⊸~mumap~⊸~nGg~⋙̸~nGt~≫⃒~nGtv~≫̸~nLeftarrow~⇍~nLeftrightarrow~⇎~nLl~⋘̸~nLt~≪⃒~nLtv~≪̸~nRightarrow~⇏~nVDash~⊯~nVdash~⊮~nacute~ń~nang~∠⃒~nap~≉~napE~⩰̸~napid~≋̸~napos~ʼn~napprox~≉~natur~♮~natural~♮~naturals~ℕ~nbump~≎̸~nbumpe~≏̸~ncap~⩃~ncaron~ň~ncedil~ņ~ncong~≇~ncongdot~⩭̸~ncup~⩂~ncy~н~neArr~⇗~nearhk~⤤~nearr~↗~nearrow~↗~nedot~≐̸~nequiv~≢~nesear~⤨~nesim~≂̸~nexist~∄~nexists~∄~nfr~𝔫~ngE~≧̸~nge~≱
~ngeq~≱~ngeqq~≧̸~ngeqslant~⩾̸~nges~⩾̸~ngsim~≵~ngt~≯~ngtr~≯~nhArr~⇎~nharr~↮~nhpar~⫲~nis~⋼~nisd~⋺~niv~∋~njcy~њ~nlArr~⇍~nlE~≦̸~nlarr~↚~nldr~‥~nle~≰~nleftarrow~↚~nleftrightarrow~↮~nleq~≰~nleqq~≦̸~nleqslant~⩽̸~nles~⩽̸~nless~≮~nlsim~≴~nlt~≮~nltri~⋪~nltrie~⋬~nmid~∤~nopf~𝕟~notinE~⋹̸~notindot~⋵̸~notinva~∉~notinvb~⋷~notinvc~⋶~notni~∌~notniva~∌~notnivb~⋾~notnivc~⋽~npar~∦~nparallel~∦~nparsl~⫽⃥~npart~∂̸~npolint~⨔~npr~⊀~nprcue~⋠~npre~⪯̸~nprec~⊀~npreceq~⪯̸~nrArr~⇏~nrarr~↛~nrarrc~⤳̸~nrarrw~↝̸~nrightarrow~↛~nrtri~⋫~nrtrie~⋭~nsc~⊁~nsccue~⋡~nsce~⪰̸~nscr~𝓃~nshortmid~∤~nshortparallel~∦~nsim~≁~nsime~≄~nsimeq~≄~nsmid~∤~nspar~∦~nsqsube~⋢~nsqsupe~⋣~nsubE~⫅̸~nsube~⊈~nsubset~⊂⃒~nsubseteq~⊈~nsubseteqq~⫅̸~nsucc~⊁~nsucceq~⪰̸~nsup~⊅~nsupE~⫆̸~nsupe~⊉~nsupset~⊃⃒~nsupseteq~⊉~nsupseteqq~⫆̸~ntgl~≹~ntlg~≸~ntriangleleft~⋪~ntrianglelefteq~⋬~ntriangleright~⋫~ntrianglerighteq~⋭~num~#~numero~№~numsp~ ~nvDash~⊭~nvHarr~⤄~nvap~≍⃒~nvdash~⊬~nvge~≥⃒~nvgt~>⃒~nvinfin~⧞~nvlArr~⤂~nvle~≤⃒~nvlt~<⃒~nvltrie~⊴⃒~nvrArr~⤃~nvrtrie~⊵⃒~nvsim~∼⃒~nwArr~⇖~nwarhk~⤣~nwarr~↖~nwarrow~↖~nwnear~⤧~oS~Ⓢ~oast~⊛~ocir~⊚~ocy~о~odash~⊝~odblac~ő~odiv~⨸~odot~⊙~odsold~⦼~ofcir~⦿~ofr~𝔬~ogon~˛~ogt~⧁~ohbar~⦵~ohm~Ω~oint~∮~olarr~↺~olcir~⦾~olcross~⦻~olt~⧀~omacr~ō~omid~⦶~ominus~⊖~oopf~𝕠~opar~⦷~operp~⦹~orarr~↻~ord~⩝~order~ℴ~orderof~ℴ~origof~⊶~oror~⩖~orslope~⩗~orv~⩛~oscr~ℴ~osol~⊘~otimesas~⨶~ovbar~⌽~par~∥~parallel~∥~parsim~⫳~parsl~⫽~pcy~п~percnt~%~period~.~pertenk~‱~pfr~𝔭~phiv~ϕ~phmmat~ℳ~phone~☎~pitchfork~⋔~planck~ℏ~planckh~ℎ~plankv~ℏ~plus~+~plusacir~⨣~plusb~⊞~pluscir~⨢~plusdo~∔~plusdu~⨥~pluse~⩲~plussim~⨦~plustwo~⨧~pm~±~pointint~⨕~popf~𝕡~pr~≺~prE~⪳~prap~⪷~prcue~≼~pre~⪯~prec~≺~precapprox~⪷~preccurlyeq~≼~preceq~⪯~precnapprox~⪹~precneqq~⪵~precnsim~⋨~precsim~≾~primes~ℙ~prnE~⪵~prnap~⪹~prnsim~⋨~profalar~⌮~profline~⌒~profsurf~⌓~propto~∝~prsim~≾~prurel~⊰~pscr~𝓅~puncsp~ 
~qfr~𝔮~qint~⨌~qopf~𝕢~qprime~⁗~qscr~𝓆~quaternions~ℍ~quatint~⨖~quest~?~questeq~≟~rAarr~⇛~rAtail~⤜~rBarr~⤏~rHar~⥤~race~∽̱~racute~ŕ~raemptyv~⦳~rangd~⦒~range~⦥~rangle~⟩~rarrap~⥵~rarrb~⇥~rarrbfs~⤠~rarrc~⤳~rarrfs~⤞~rarrhk~↪~rarrlp~↬~rarrpl~⥅~rarrsim~⥴~rarrtl~↣~rarrw~↝~ratail~⤚~ratio~∶~rationals~ℚ~rbarr~⤍~rbbrk~❳~rbrace~}~rbrack~]~rbrke~⦌~rbrksld~⦎~rbrkslu~⦐~rcaron~ř~rcedil~ŗ~rcub~}~rcy~р~rdca~⤷~rdldhar~⥩~rdquor~”~rdsh~↳~realine~ℛ~realpart~ℜ~reals~ℝ~rect~▭~rfisht~⥽~rfr~𝔯~rhard~⇁~rharu~⇀~rharul~⥬~rhov~ϱ~rightarrow~→~rightarrowtail~↣~rightharpoondown~⇁~rightharpoonup~⇀~rightleftarrows~⇄~rightleftharpoons~⇌~rightrightarrows~⇉~rightsquigarrow~↝~rightthreetimes~⋌~ring~˚~risingdotseq~≓~rlarr~⇄~rlhar~⇌~rmoust~⎱~rmoustache~⎱~rnmid~⫮~roang~⟭~roarr~⇾~robrk~⟧~ropar~⦆~ropf~𝕣~roplus~⨮~rotimes~⨵~rpar~)~rpargt~⦔~rppolint~⨒~rrarr~⇉~rscr~𝓇~rsh~↱~rsqb~]~rsquor~’~rthree~⋌~rtimes~⋊~rtri~▹~rtrie~⊵~rtrif~▸~rtriltri~⧎~ruluhar~⥨~rx~℞~sacute~ś~sc~≻~scE~⪴~scap~⪸~sccue~≽~sce~⪰~scedil~ş~scirc~ŝ~scnE~⪶~scnap~⪺~scnsim~⋩~scpolint~⨓~scsim~≿~scy~с~sdotb~⊡~sdote~⩦~seArr~⇘~searhk~⤥~searr~↘~searrow~↘~semi~;~seswar~⤩~setminus~∖~setmn~∖~sext~✶~sfr~𝔰~sfrown~⌢~sharp~♯~shchcy~щ~shcy~ш~shortmid~∣~shortparallel~∥~sigmav~ς~simdot~⩪~sime~≃~simeq~≃~simg~⪞~simgE~⪠~siml~⪝~simlE~⪟~simne~≆~simplus~⨤~simrarr~⥲~slarr~←~smallsetminus~∖~smashp~⨳~smeparsl~⧤~smid~∣~smile~⌣~smt~⪪~smte~⪬~smtes~⪬︀~softcy~ь~sol~/~solb~⧄~solbar~⌿~sopf~𝕤~spadesuit~♠~spar~∥~sqcap~⊓~sqcaps~⊓︀~sqcup~⊔~sqcups~⊔︀~sqsub~⊏~sqsube~⊑~sqsubset~⊏~sqsubseteq~⊑~sqsup~⊐~sqsupe~⊒~sqsupset~⊐~sqsupseteq~⊒~squ~□~square~□~squarf~▪~squf~▪~srarr~→~sscr~𝓈~ssetmn~∖~ssmile~⌣~sstarf~⋆~star~☆~starf~★~straightepsilon~ϵ~straightphi~ϕ~strns~¯~subE~⫅~subdot~⪽~subedot~⫃~submult~⫁~subnE~⫋~subne~⊊~subplus~⪿~subrarr~⥹~subset~⊂~subseteq~⊆~subseteqq~⫅~subsetneq~⊊~subsetneqq~⫋~subsim~⫇~subsub~⫕~subsup~⫓~succ~≻~succapprox~⪸~succcurlyeq~≽~succeq~⪰~succnapprox~⪺~succneqq~⪶~succnsim~⋩~succsim~≿~sung~♪~supE~⫆~supdot~⪾~supdsub~⫘~supedot~⫄~suphsol~⟉~suphsub~⫗~suplarr~⥻~supmult~⫂~supnE~⫌~supne~⊋~supplus~⫀~supset~⊃~supseteq~⊇~supseteqq~⫆~supsetneq~⊋~supsetneqq~⫌~supsim~⫈~supsub~⫔~supsup~⫖~swArr~⇙~swarhk~⤦~swarr~↙~swarrow~↙~swnwar~⤪~target~⌖~tbrk~⎴~tcaron~ť~tcedil~ţ~tcy~т~tdot~⃛~telrec~⌕~tfr~𝔱~therefore~∴~thetav~ϑ~thickapprox~≈~thicksim~∼~thkap~≈~thksim~∼~timesb~⊠~timesbar~⨱~timesd~⨰~tint~∭~toea~⤨~top~⊤~topbot~⌶~topcir~⫱~topf~𝕥~topfork~⫚~tosa~⤩~tprime~‴~triangle~▵~triangledown~▿~triangleleft~◃~trianglelefteq~⊴~triangleq~≜~triangleright~▹~trianglerighteq~⊵~tridot~◬~trie~≜~triminus~⨺~triplus~⨹~trisb~⧍~tritime~⨻~trpezium~⏢~tscr~𝓉~tscy~ц~tshcy~ћ~tstrok~ŧ~twixt~≬~twoheadleftarrow~↞~twoheadrightarrow~↠~uHar~⥣~ubrcy~ў~ubreve~ŭ~ucy~у~udarr~⇅~udblac~ű~udhar~⥮~ufisht~⥾~ufr~𝔲~uharl~↿~uharr~↾~uhblk~▀~ulcorn~⌜~ulcorner~⌜~ulcrop~⌏~ultri~◸~umacr~ū~uogon~ų~uopf~𝕦~uparrow~↑~updownarrow~↕~upharpoonleft~↿~upharpoonright~↾~uplus~⊎~upsi~υ~upuparrows~⇈~urcorn~⌝~urcorner~⌝~urcrop~⌎~uring~ů~urtri~◹~uscr~𝓊~utdot~⋰~utilde~ũ~utri~▵~utrif~▴~uuarr~⇈~uwangle~⦧~vArr~⇕~vBar~⫨~vBarv~⫩~vDash~⊨~vangrt~⦜~varepsilon~ϵ~varkappa~ϰ~varnothing~∅~varphi~ϕ~varpi~ϖ~varpropto~∝~varr~↕~varrho~ϱ~varsigma~ς~varsubsetneq~⊊︀~varsubsetneqq~⫋︀~varsupsetneq~⊋︀~varsupsetneqq~⫌︀~vartheta~ϑ~vartriangleleft~⊲~vartriangleright~⊳~vcy~в~vdash~⊢~vee~∨~veebar~⊻~veeeq~≚~vellip~⋮~verbar~|~vert~|~vfr~𝔳~vltri~⊲~vnsub~⊂⃒~vnsup~⊃⃒~vopf~𝕧~vprop~∝~vrtri~⊳~vscr~𝓋~vsubnE~⫋︀~vsubne~⊊︀~vsupnE~⫌︀~vsupne~⊋︀~vzigzag~⦚~wcirc~ŵ~wedbar~⩟~wedge~∧~wedgeq~≙~wfr~𝔴~wopf~𝕨~wp~℘~wr~≀~wreath~≀~wscr~𝓌~xcap~⋂~xcirc~◯~xcup~⋃~xdtri~▽~xfr~𝔵~xhArr~⟺~xharr~⟷~xlArr~⟸~xlarr~⟵~xmap~⟼~xnis~⋻~xodot~⨀~xopf~𝕩~xoplus~⨁~xot
ime~⨂~xrArr~⟹~xrarr~⟶~xscr~𝓍~xsqcup~⨆~xuplus~⨄~xutri~△~xvee~⋁~xwedge~⋀~yacy~я~ycirc~ŷ~ycy~ы~yfr~𝔶~yicy~ї~yopf~𝕪~yscr~𝓎~yucy~ю~zacute~ź~zcaron~ž~zcy~з~zdot~ż~zeetrf~ℨ~zfr~𝔷~zhcy~ж~zigrarr~⇝~zopf~𝕫~zscr~𝓏~~AMP~&~COPY~©~GT~>~LT~<~QUOT~\"~REG~®", exports.namedReferences['html4']); +//# sourceMappingURL=named-references.js.map + +/***/ }), + +/***/ 1057: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.numericUnicodeMap = void 0; +exports.numericUnicodeMap = { + 0: 65533, + 128: 8364, + 130: 8218, + 131: 402, + 132: 8222, + 133: 8230, + 134: 8224, + 135: 8225, + 136: 710, + 137: 8240, + 138: 352, + 139: 8249, + 140: 338, + 142: 381, + 145: 8216, + 146: 8217, + 147: 8220, + 148: 8221, + 149: 8226, + 150: 8211, + 151: 8212, + 152: 732, + 153: 8482, + 154: 353, + 155: 8250, + 156: 339, + 158: 382, + 159: 376 +}; +//# sourceMappingURL=numeric-unicode-map.js.map + +/***/ }), + +/***/ 9718: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.highSurrogateTo = exports.highSurrogateFrom = exports.getCodePoint = exports.fromCodePoint = void 0; +exports.fromCodePoint = String.fromCodePoint || + function (astralCodePoint) { + return String.fromCharCode(Math.floor((astralCodePoint - 0x10000) / 0x400) + 0xd800, ((astralCodePoint - 0x10000) % 0x400) + 0xdc00); + }; +// @ts-expect-error - String.prototype.codePointAt might not exist in older node versions +exports.getCodePoint = String.prototype.codePointAt + ? function (input, position) { + return input.codePointAt(position); + } + : function (input, position) { + return (input.charCodeAt(position) - 0xd800) * 0x400 + input.charCodeAt(position + 1) - 0xdc00 + 0x10000; + }; +exports.highSurrogateFrom = 0xd800; +exports.highSurrogateTo = 0xdbff; +//# sourceMappingURL=surrogate-pairs.js.map + +/***/ }), + +/***/ 1969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* eslint-disable node/no-missing-require */ + +function getPackageJSON() { + return __nccwpck_require__(2721); +} + +exports.getPackageJSON = getPackageJSON; + + /***/ }), /***/ 4012: @@ -65404,6 +107257,30 @@ module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4. 
/***/ }), +/***/ 2721: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.16.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- --sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha 
build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"<4.1.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","duplexify":"^4.1.3","fast-xml-parser":"^4.4.1","gaxios":"^6.0.2","google-auth-library":"^9.6.3","html-entities":"^2.5.2","mime":"^3.0.0","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/duplexify":"^3.6.4","@types/mime":"^3.0.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^22.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^9.0.0","form-data":"^4.0.0","gapic-tools":"^0.4.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^3.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.5.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^18.0.0","nise":"6.0.0","path-to-regexp":"6.3.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); + +/***/ }), + +/***/ 6495: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"gaxios","version":"6.7.1","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"1.1.19","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^10.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cheerio":"1.0.0-rc.10","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","linkinator":"^3.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^19.0.0","sinon":"^18.0.0","stream-browserify":"^3.0.0","tmp":"0.2.3","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9","uuid":"^9.0.1"}}'); + +/***/ }), + +/***/ 6066: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"google-auth-library","version":"9.15.1","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^17.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","cheerio":"1.0.0-rc.12","codecov":"^3.0.2","engine.io":"6.6.2","gts":"^5.0.0","is-docker":"^2.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","pdfmake":"0.2.12","puppeteer":"^21.0.0","sinon":"^18.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"jsdoc -c .jsdoc.json","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); + +/***/ }), + /***/ 1813: /***/ 
((module) => { diff --git a/dist/restore/index.js b/dist/restore/index.js index 95f7849..0b4ce20 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -7792,7 +7792,7 @@ exports.isTokenCredential = isTokenCredential; Object.defineProperty(exports, "__esModule", ({ value: true })); -var uuid = __nccwpck_require__(3534); +var uuid = __nccwpck_require__(2048); var util = __nccwpck_require__(9023); var tslib = __nccwpck_require__(470); var xml2js = __nccwpck_require__(758); @@ -14157,652 +14157,6 @@ var __createBinding; }); -/***/ }), - -/***/ 3534: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(3417)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(2855)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(7974)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(6165)); - -var _nil = _interopRequireDefault(__nccwpck_require__(7441)); - -var _version = _interopRequireDefault(__nccwpck_require__(5962)); - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 7370: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 7441: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 6221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ - - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 8689: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 6299: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 8821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 9651: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; -} - -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 3417: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 2855: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _md = _interopRequireDefault(__nccwpck_require__(7370)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 8132: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; - } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 7974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - - return buf; - } - - return (0, _stringify.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 6165: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _sha = _interopRequireDefault(__nccwpck_require__(8821)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 1690: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(8689)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 5962: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); -} - -var _default = version; -exports["default"] = _default; - /***/ }), /***/ 5862: @@ -42639,6 +41993,619 @@ var __createBinding; }); +/***/ }), + +/***/ 9469: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(6251); +const extend = __nccwpck_require__(3860); +const resource_stream_1 = __nccwpck_require__(7618); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! 
Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. 
+ if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); + } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(2203); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + this._otherArgs = []; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; + } + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map + +/***/ }), + +/***/ 7635: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(2203); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. 
+ * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. 
+ if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9977: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(538); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = new Queue(); + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.size > 0) { + queue.dequeue()(); + } + }; + + const run = async (fn, resolve, ...args) => { + activeCount++; + + const result = (async () => fn(...args))(); + + resolve(result); + + try { + await result; + } catch {} + + next(); + }; + + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); + + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. + await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); + } + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); + }); + + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); + + return generator; +}; + +module.exports = pLimit; + + /***/ }), /***/ 9750: @@ -42751,7 +42718,7 @@ exports.ContextAPI = ContextAPI; */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DiagAPI = void 0; -const ComponentLogger_1 = __nccwpck_require__(7723); +const ComponentLogger_1 = __nccwpck_require__(104); const logLevelLogger_1 = __nccwpck_require__(3514); const types_1 = __nccwpck_require__(2573); const global_utils_1 = __nccwpck_require__(9923); @@ -43473,7 +43440,7 @@ exports.diag = diag_1.DiagAPI.instance(); /***/ }), -/***/ 7723: +/***/ 104: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -50282,6 +50249,531 @@ class ReflectionTypeCheck { exports.ReflectionTypeCheck = ReflectionTypeCheck; +/***/ }), + +/***/ 8662: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports["default"] = once; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7413: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var eventTargetShim = __nccwpck_require__(6577); + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map + + +/***/ }), + +/***/ 5183: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(8611)); +const https = __importStar(__nccwpck_require__(5692)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 8894: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const http = __importStar(__nccwpck_require__(8611)); +const https_1 = __nccwpck_require__(5692); +__exportStar(__nccwpck_require__(5183), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. 
+ if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 
'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6251: +/***/ ((module) => { + +"use strict"; + + +const arrify = value => { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +}; + +module.exports = arrify; + + +/***/ }), + +/***/ 5195: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Packages +var retrier = __nccwpck_require__(5546); + +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; + + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } + + op = retrier.operation(options); + + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) + + function bail(err) { + reject(err || new Error('Aborted')); + } + + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } + + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } + } + + function runAttempt(num) { + var val; + + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } + + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } + + op.attempt(runAttempt); + } + + return new Promise(run); +} + +module.exports = retry; + + /***/ }), /***/ 1324: @@ -50793,6 +51285,3093 @@ function range(a, b, str) { } +/***/ }), + +/***/ 8793: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray + +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} + +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 + +function getLens (b64) { + var len = b64.length + + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} + + +/***/ }), + +/***/ 1259: +/***/ (function(module) { + +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.2.1 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2025 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // The index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if ( true && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); + + /***/ }), /***/ 4691: @@ -51001,6 +54580,55 @@ function expand(str, isTop) { +/***/ }), + +/***/ 9732: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*jshint node:true */ + +var Buffer = (__nccwpck_require__(181).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(181).SlowBuffer); + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. 
+ if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; + + /***/ }), /***/ 5630: @@ -51236,6 +54864,850 @@ var isArray = Array.isArray || function (xs) { }; +/***/ }), + +/***/ 6110: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? 
http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
+ */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 897: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(744); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 2830: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. 
+ */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(6110); +} else { + module.exports = __nccwpck_require__(5108); +} + + +/***/ }), + +/***/ 5108: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(2018); +const util = __nccwpck_require__(9023); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(1450); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? 
c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + /***/ }), /***/ 2710: @@ -51350,6 +55822,15373 @@ DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { }; +/***/ }), + +/***/ 9112: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var stream = __nccwpck_require__(6131) +var eos = __nccwpck_require__(1424) +var inherits = __nccwpck_require__(9598) +var shift = __nccwpck_require__(4633) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 325: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Buffer = (__nccwpck_require__(3058).Buffer); + +var getParamBytesForAlg = __nccwpck_require__(5028); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if 
(signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; + + +/***/ }), + +/***/ 5028: +/***/ ((module) => { + +"use strict"; + + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; + + +/***/ }), + +/***/ 1424: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(5560); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 6577: +/***/ ((module, exports) => { + +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. 
+ */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. 
+ * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. 
+ for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." 
+ ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. 
+ * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. 
+ */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + + +/***/ }), + +/***/ 3860: +/***/ ((module) => { + +"use strict"; + + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. 
+ var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; + + +/***/ }), + +/***/ 9741: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(9433); +const XMLParser = __nccwpck_require__(9844); +const XMLBuilder = __nccwpck_require__(659); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 812: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} + +module.exports = getIgnoreAttributesFn + +/***/ }), + +/***/ 7019: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 9433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(7019); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested 
function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
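+ //e.g. for input "<a></a><b></b>" (assumed sample, only to illustrate the check below):
+ //after "</a>" closes the root, the opening "<b>" reaches this point and is
+ //rejected with 'Multiple possible root nodes found.'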
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs 
= __nccwpck_require__(6378); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes === true || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0, []).val; + } +}; + +Builder.prototype.j2x = function(jObj, level, ajPath) { + let attrStr = ''; + let val = ''; + const jPath = ajPath.join('.') + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key === this.options.cdataPropName) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr && !this.ignoreAttributesFn(attr, jPath)) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + } else if (!attr) { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1, ajPath.concat(key)); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level, ajPath) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level, ajPath) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level, ajPath) { + const result = this.j2x(object, level + 1, ajPath.concat(key)); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { + +const EOL = "\n"; + +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; + + +/***/ }), + +/***/ 151: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(7019); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; + +/***/ }), + +/***/ 3017: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +///@ts-check + +const util = __nccwpck_require__(7019); +const xmlNode = __nccwpck_require__(9307); +const readDocType = __nccwpck_require__(151); +const toNumber = __nccwpck_require__(6496); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: 
/&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag 
scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = 
currentNode.child.length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else 
if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 9844: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(4769); +const OrderedObjParser = __nccwpck_require__(3017); +const { prettify} = __nccwpck_require__(7594); +const validator = __nccwpck_require__(9433); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 7594: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; + + +/***/ }), + +/***/ 9307: +/***/ ((module) => { + +"use strict"; + + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; + +/***/ }), + +/***/ 7506: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +exports.defaultErrorRedactor = defaultErrorRedactor; +const url_1 = __nccwpck_require__(7016); +const util_1 = __nccwpck_require__(3155); +const extend_1 = __importDefault(__nccwpck_require__(3860)); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. 
+ * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. + } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/i.test(key)) { + headers[key] = REDACT; + } + // any casing of `Authorization` + if (/^authorization$/i.test(key)) { + headers[key] = REDACT; + } + // anything containing secret, such as 'client secret' + if (/secret/i.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/i.test(text) || + /assertion=/i.test(text) || + /secret/i.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + if ('client_secret' in obj) { + obj['client_secret'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + if (url.searchParams.has('client_secret')) { + url.searchParams.set('client_secret', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid 
URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +//# sourceMappingURL=common.js.map + +/***/ }), + +/***/ 6010: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(3860)); +const https_1 = __nccwpck_require__(5692); +const node_fetch_1 = __importDefault(__nccwpck_require__(6705)); +const querystring_1 = __importDefault(__nccwpck_require__(3480)); +const is_stream_1 = __importDefault(__nccwpck_require__(6543)); +const url_1 = __nccwpck_require__(7016); +const common_1 = __nccwpck_require__(7506); +const retry_1 = __nccwpck_require__(2789); +const stream_1 = __nccwpck_require__(2203); +const uuid_1 = __nccwpck_require__(3187); +const interceptor_1 = __nccwpck_require__(5608); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + _Gaxios_instances.add(this); + this.agentCache = new Map(); + this.defaults = defaults || {}; + this.interceptors = { + request: new interceptor_1.GaxiosInterceptorManager(), + response: new interceptor_1.GaxiosInterceptorManager(), + }; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); + return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _b; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? 
void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_b = err.config) === null || _b === void 0 ? void 0 : _b.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } + /** + * Creates an async generator that yields the pieces of a multipart/related request body. + * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive + * multipart/related requests are not currently supported. + * + * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. + * @param {string} boundary the boundary string to be placed between each part. 
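+     *
+     * Illustrative sketch (editor's addition, not upstream gaxios docs): for a
+     * single JSON part and boundary "BOUNDARY" the generator yields roughly
+     *
+     *   --BOUNDARY\r\n
+     *   Content-Type: application/json\r\n\r\n
+     *   {"hello":"world"}
+     *   \r\n
+     *   --BOUNDARY--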
+ */ + async *getMultipartRequest(multipartOptions, boundary) { + const finale = `--${boundary}--`; + for (const currentPart of multipartOptions) { + const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; + const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; + yield preamble; + if (typeof currentPart.content === 'string') { + yield currentPart.content; + } + else { + yield* currentPart.content; + } + yield '\r\n'; + } + yield finale; + } +} +exports.Gaxios = Gaxios; +_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { + var _b, _c; + const candidate = new url_1.URL(url); + const noProxyList = [...noProxy]; + const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? void 0 : _c.split(',')) || []; + for (const rule of noProxyEnvList) { + noProxyList.push(rule.trim()); + } + for (const rule of noProxyList) { + // Match regex + if (rule instanceof RegExp) { + if (rule.test(candidate.toString())) { + return false; + } + } + // Match URL + else if (rule instanceof url_1.URL) { + if (rule.origin === candidate.origin) { + return false; + } + } + // Match string regex + else if (rule.startsWith('*.') || rule.startsWith('.')) { + const cleanedRule = rule.replace(/^\*\./, '.'); + if (candidate.hostname.endsWith(cleanedRule)) { + return false; + } + } + // Basic string match + else if (rule === candidate.origin || + rule === candidate.hostname || + rule === candidate.href) { + return false; + } + } + return true; +}, _Gaxios_applyRequestInterceptors = +/** + * Applies the request interceptors. The request interceptors are applied after the + * call to prepareRequest is completed. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyRequestInterceptors(options) { + let promiseChain = Promise.resolve(options); + for (const interceptor of this.interceptors.request.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_applyResponseInterceptors = +/** + * Applies the response interceptors. The response interceptors are applied after the + * call to request is made. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyResponseInterceptors(response) { + let promiseChain = Promise.resolve(response); + for (const interceptor of this.interceptors.response.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_prepareRequest = +/** + * Validates the options, merges them with defaults, and prepare request. + * + * @param options The original options passed from the client. 
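+ *
+ * Illustrative sketch (editor's addition, assuming the default
+ * paramsSerializer): { url: 'https://example.com/v1', params: { q: 'x' } }
+ * ends up requesting 'https://example.com/v1?q=x', and a plain-object
+ * `data` without an explicit Content-Type is JSON.stringify'd and sent as
+ * 'application/json'.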
+ * @returns Prepared options, ready to make a request + */ +async function _Gaxios_prepareRequest(options) { + var _b, _c, _d, _e; + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl.toString() + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.multipart === undefined && opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + else if (opts.multipart && opts.multipart.length > 0) { + // note: once the minimum version reaches Node 16, + // this can be replaced with randomUUID() function from crypto + // and the dependency on UUID removed + const boundary = (0, uuid_1.v4)(); + opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + const bodyStream = new stream_1.PassThrough(); + opts.body = bodyStream; + (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = opts.proxy || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || + ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? 
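+        // Editor's note (illustrative, not upstream gaxios): the proxy is
+        // taken from opts.proxy or the HTTPS_PROXY/https_proxy/HTTP_PROXY/
+        // http_proxy environment variables, and is only used when the target
+        // URL is not excluded by noProxy/NO_PROXY; e.g. with
+        // NO_PROXY='.internal.example' a request to
+        // https://api.internal.example/v1 is sent directly.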
void 0 : _e.http_proxy); + const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); + if (opts.agent) { + // don't do any of the following options - use the user-provided agent. + } + else if (proxy && urlMayUseProxy) { + const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + opts.agent = new HttpsProxyAgent(proxy, { + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; +}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { + __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(3669)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); + return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); +}; +/** + * A cache for the lazily-loaded proxy agent. + * + * Should use {@link Gaxios[#getProxyAgent]} to retrieve. + */ +// using `import` to dynamically import the types here +_Gaxios_proxyAgent = { value: void 0 }; +//# sourceMappingURL=gaxios.js.map + +/***/ }), + +/***/ 7003: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +exports.request = request; +const gaxios_1 = __nccwpck_require__(6010); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(7506); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); +__exportStar(__nccwpck_require__(5608), exports); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 5608: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosInterceptorManager = void 0; +/** + * Class to manage collections of GaxiosInterceptors for both requests and responses. + */ +class GaxiosInterceptorManager extends Set { +} +exports.GaxiosInterceptorManager = GaxiosInterceptorManager; +//# sourceMappingURL=interceptor.js.map + +/***/ }), + +/***/ 2789: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = getRetryConfig; +async function getRetryConfig(err) { + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 
3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + config.retryDelayMultiplier = config.retryDelayMultiplier + ? config.retryDelayMultiplier + : 2; + config.timeOfFirstRequest = config.timeOfFirstRequest + ? config.timeOfFirstRequest + : Date.now(); + config.totalTimeout = config.totalTimeout + ? config.totalTimeout + : Number.MAX_SAFE_INTEGER; + config.maxRetryDelay = config.maxRetryDelay + ? config.maxRetryDelay + : Number.MAX_SAFE_INTEGER; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 408 - Retry ("Request Timeout") + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [408, 408], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + const delay = getNextRetryDelay(config); + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
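+    // Worked example (editor's note, not in the upstream source): with the
+    // default ranges a 503 falls inside [500, 599] and may be retried, while
+    // a 404 matches no range and is not. With the default multiplier of 2 and
+    // an initial retryDelay of 100 ms, getNextRetryDelay() below produces
+    // roughly 100 ms, 500 ms and 1500 ms for the first three attempts
+    // (retryDelay + ((2^attempt - 1) / 2) * 1000, attempt counted from 0),
+    // capped by maxRetryDelay and the remaining totalTimeout.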
+ if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +/** + * Gets the delay to wait before the next retry. + * + * @param {RetryConfig} config The current set of retry options + * @returns {number} the amount of ms to wait before the next retry attempt. + */ +function getNextRetryDelay(config) { + var _a; + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1 / 2) * 1000) + const calculatedDelay = retryDelay + + ((Math.pow(config.retryDelayMultiplier, config.currentRetryAttempt) - 1) / + 2) * + 1000; + const maxAllowableDelay = config.totalTimeout - (Date.now() - config.timeOfFirstRequest); + return Math.min(calculatedDelay, maxAllowableDelay, config.maxRetryDelay); +} +//# sourceMappingURL=retry.js.map + +/***/ }), + +/***/ 3155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6495); +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 3187: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6698)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6272)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(8485)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(742)); + +var _nil = _interopRequireDefault(__nccwpck_require__(8212)); + +var _version = _interopRequireDefault(__nccwpck_require__(6651)); + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4386)); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 1979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 3968: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 8212: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 9884: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 956: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 4566: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5398: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 4386: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6698: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6272: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _md = _interopRequireDefault(__nccwpck_require__(1979)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2781: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(4386); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 8485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(3968)); + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 742: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5398)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 5913: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(956)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6651: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCE_LINUX_BIOS_PATHS = void 0; +exports.isGoogleCloudServerless = isGoogleCloudServerless; +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +exports.isGoogleComputeEngine = isGoogleComputeEngine; +exports.detectGCPResidency = detectGCPResidency; +const fs_1 = __nccwpck_require__(9896); +const os_1 = __nccwpck_require__(857); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +/** + * Determines if the process is running on Google Cloud Platform. 
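+ *
+ * Example (editor's note, illustrative only): on a Cloud Run service the
+ * K_SERVICE environment variable is set, so isGoogleCloudServerless()
+ * returns true and this function short-circuits to true without touching
+ * the BIOS files or the MAC address scan.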
+ * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +//# sourceMappingURL=gcp-residency.js.map + +/***/ }), + +/***/ 3046: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.gcpResidencyCache = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +exports.instance = instance; +exports.project = project; +exports.universe = universe; +exports.bulk = bulk; +exports.isAvailable = isAvailable; +exports.resetIsAvailableCache = resetIsAvailableCache; +exports.getGCPResidency = getGCPResidency; +exports.setGCPResidency = setGCPResidency; +exports.requestTimeout = requestTimeout; +const gaxios_1 = __nccwpck_require__(7003); +const jsonBigint = __nccwpck_require__(4826); +const gcp_residency_1 = __nccwpck_require__(381); +const logger = __nccwpck_require__(1577); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +const log = logger.log('gcp metadata'); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. 
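+ *
+ * Illustrative sketch (editor's addition): with GCE_METADATA_HOST set to
+ * 'metadata.google.internal' and no scheme given, 'http://' is prefixed and
+ * the result is http://metadata.google.internal/computeMetadata/v1.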
+ */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const req = { + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }; + log.info('instance request %j', req); + const res = await requestMethod(req); + log.info('instance metadata is %s', res.data); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header. Expected '${exports.HEADER_VALUE}', got ${res.headers[exports.HEADER_NAME.toLowerCase()] ? `'${res.headers[exports.HEADER_NAME.toLowerCase()]}'` : 'no header'}`); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; +} +async function fastFailMetadataRequest(options) { + var _a; + const secondaryOptions = { + ...options, + url: (_a = options.url) === null || _a === void 0 ? void 0 : _a.toString().replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. 
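// A hedged sketch of the raw HTTP exchange metadataAccessor() performs: request
// a key under /computeMetadata/v1 with the Metadata-Flavor: Google header and
// verify the same header on the response. Assumes Node 18+ global fetch and a
// reachable metadata server; sketchFetchMetadata is illustrative, not part of
// the bundle.
async function sketchFetchMetadata(key) {
  const res = await fetch(`http://169.254.169.254/computeMetadata/v1/${key}`, {
    headers: { 'Metadata-Flavor': 'Google' },
  });
  if (res.headers.get('metadata-flavor') !== 'Google') {
    throw new Error('Invalid response from metadata service: incorrect Metadata-Flavor header.');
  }
  return res.text();
}
// sketchFetchMetadata('instance/service-accounts/default/email').then(console.log);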
+ // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe-domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } +} +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; +} +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; +} +__exportStar(__nccwpck_require__(381), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
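// An illustrative sketch of how the residency cache above drives request
// timeouts, assuming the standalone gcp-metadata npm package as a stand-in for
// bundled module 3046 (the exported names shown here match the exports above).
const gcpMetadata = require('gcp-metadata');
gcpMetadata.setGCPResidency(true);  // pin "on GCP"
gcpMetadata.requestTimeout();       // 0: no timeout against the metadata server
gcpMetadata.setGCPResidency(false); // pin "off GCP"
gcpMetadata.requestTimeout();       // 3000 ms
gcpMetadata.setGCPResidency();      // re-run detectGCPResidency() and cache the result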
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(4434); +const gaxios_1 = __nccwpck_require__(7003); +const transporters_1 = __nccwpck_require__(7633); +const util_1 = __nccwpck_require__(7870); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.apiKey = opts.apiKey; + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. + * + * @expiremental + */ + get gaxios() { + if (this.transporter instanceof gaxios_1.Gaxios) { + return this.transporter; + } + else if (this.transporter instanceof transporters_1.DefaultTransporter) { + return this.transporter.instance; + } + else if ('instance' in this.transporter && + this.transporter.instance instanceof gaxios_1.Gaxios) { + return this.transporter.instance; + } + return null; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. 
+ */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. + */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.AuthClient = AuthClient; + + +/***/ }), + +/***/ 1261: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(7647); +const baseexternalclient_1 = __nccwpck_require__(142); +const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(9157); +const util_1 = __nccwpck_require__(7870); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
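// An illustrative external_account configuration of the kind the AwsClient
// below consumes through its credential_source branch. Every identifier here
// (project number, pool, provider) is a placeholder, and the IMDS URLs are the
// standard EC2 metadata endpoints rather than values taken from this bundle.
const exampleAwsCredentialConfig = {
  type: 'external_account',
  audience: '//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-aws-provider',
  subject_token_type: 'urn:ietf:params:aws:token-type:aws4_request',
  token_url: 'https://sts.googleapis.com/v1/token',
  credential_source: {
    environment_id: 'aws1', // must match /^(aws)(\d+)$/ with version 1
    region_url: 'http://169.254.169.254/latest/meta-data/placement/availability-zone',
    url: 'http://169.254.169.254/latest/meta-data/iam/security-credentials',
    regional_cred_verification_url: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15',
    imdsv2_session_token_url: 'http://169.254.169.254/latest/api/token', // optional, enables IMDSv2
  },
};
// new AwsClient(exampleAwsCredentialConfig) would then exchange signed
// GetCallerIdentity requests for GCP access tokens as implemented below.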
+ */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !awsSecurityCredentialsSupplier) { + throw new Error('A credential source or AWS security credentials supplier must be specified.'); + } + if (credentialSource && awsSecurityCredentialsSupplier) { + throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); + } + if (awsSecurityCredentialsSupplier) { + this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; + this.regionalCredVerificationUrl = + __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + this.environmentId = credentialSourceOpts.get('environment_id'); + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + const regionUrl = credentialSourceOpts.get('region_url'); + // This is only required if AWS security credentials are not available in + // environment variables. + const securityCredentialsUrl = credentialSourceOpts.get('url'); + const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); + this.awsSecurityCredentialsSupplier = + new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ + regionUrl: regionUrl, + securityCredentialsUrl: securityCredentialsUrl, + imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, + }); + this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + this.awsRequestSigner = null; + this.region = ''; + } + validateEnvironmentId() { + var _b; + const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. This will call the + * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS + * Security Credentials, then use them to create a signed AWS STS request that + * can be exchanged for a GCP access token. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. 
+ const options = await this.awsRequestSigner.getRequestOptions({ + ..._a.RETRY_CONFIG, + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } +} +exports.AwsClient = AwsClient; +_a = AwsClient; +_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; + + +/***/ }), + +/***/ 7647: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(8851); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. 
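// A condensed, runnable restatement of the serialization steps described above:
// the signed GetCallerIdentity request is flattened into {url, method, headers:
// [{key, value}, ...]} and URL-encoded. The sample values in the trailing
// comment are placeholders; sketchSerializeSubjectToken is not part of the bundle.
function sketchSerializeSubjectToken(signedRequest, audience) {
  const headers = Object.entries({
    'x-goog-cloud-target-resource': audience,
    ...signedRequest.headers,
  }).map(([key, value]) => ({ key, value }));
  return encodeURIComponent(JSON.stringify({
    url: signedRequest.url,
    method: signedRequest.method,
    headers,
  }));
}
// sketchSerializeSubjectToken(
//   { url: 'https://sts.us-east-1.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15',
//     method: 'POST',
//     headers: { 'x-amz-date': '20240101T000000Z', Authorization: 'AWS4-HMAC-SHA256 ...' } },
//   '//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-aws-provider');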
+ */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. 
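// A self-contained sketch of the SigV4 key-derivation chain that getSigningKey()
// above implements, written against Node's crypto module directly rather than
// the library's crypto wrapper. All inputs are placeholders.
const nodeCrypto = require('crypto');
const hmacSha256 = (key, msg) => nodeCrypto.createHmac('sha256', key).update(msg).digest();
function sketchSigningKey(secretAccessKey, dateStamp, region, serviceName) {
  const kDate = hmacSha256(`AWS4${secretAccessKey}`, dateStamp); // e.g. '20240101'
  const kRegion = hmacSha256(kDate, region);                     // e.g. 'us-east-1'
  const kService = hmacSha256(kRegion, serviceName);             // e.g. 'sts'
  return hmacSha256(kService, 'aws4_request');                   // AWS_REQUEST_TYPE termination string
}
// The final signature is then hex-encoded HMAC-SHA256 over the string to sign:
// nodeCrypto.createHmac('sha256', sketchSigningKey(secret, date, region, service))
//   .update(stringToSign).digest('hex');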
+ * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? 
undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} + + +/***/ }), + +/***/ 142: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _BaseExternalAccountClient_instances, _BaseExternalAccountClient_pendingAccessToken, _BaseExternalAccountClient_internalRefreshAccessTokenAsync; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +const util_1 = __nccwpck_require__(7870); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. 
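// An illustrative sketch of the RFC 8693 token-exchange parameters that the
// grant-type and token-type constants above feed into; the bundled
// sts.StsCredentials class builds and sends the real request, and every value
// below is a placeholder.
const exampleStsExchangeParams = new URLSearchParams({
  grant_type: 'urn:ietf:params:oauth:grant-type:token-exchange',
  requested_token_type: 'urn:ietf:params:oauth:token-type:access_token',
  audience: '//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-provider',
  subject_token: 'SERIALIZED_EXTERNAL_CREDENTIAL',
  subject_token_type: 'urn:ietf:params:aws:token-type:aws4_request',
  scope: 'https://www.googleapis.com/auth/cloud-platform',
});
// exampleStsExchangeParams.toString() is the form-encoded body posted to the token_url.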
+ * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(4810); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + _BaseExternalAccountClient_instances.add(this); + /** + * A pending access token request. Used for concurrent calls. + */ + _BaseExternalAccountClient_pendingAccessToken.set(this, null); + const opts = (0, util_1.originalOrCamelOptions)(options); + const type = opts.get('type'); + if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? 
_a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + this.supplierContext = { + audience: this.audience, + subjectTokenType: this.subjectTokenType, + transporter: this.transporter, + }; + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The format looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?<email>[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh.
+ if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + ...BaseExternalAccountClient.RETRY_CONFIG, + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. 
+ // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Use an existing access token request, or cache a new one + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f") || __classPrivateFieldGet(this, _BaseExternalAccountClient_instances, "m", _BaseExternalAccountClient_internalRefreshAccessTokenAsync).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f"); + } + finally { + // clear pending access token for future requests + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, null, "f"); + } + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + ...BaseExternalAccountClient.RETRY_CONFIG, + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. 
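// A generic sketch of the single in-flight refresh pattern used by
// refreshAccessTokenAsync() above: concurrent callers await one shared pending
// promise, which is cleared once it settles. fetchNewToken is a stand-in for
// the private #internalRefreshAccessTokenAsync defined further below.
let pendingRefreshSketch = null;
async function sketchRefresh(fetchNewToken) {
  pendingRefreshSketch = pendingRefreshSketch || fetchNewToken();
  try {
    return await pendingRefreshSketch;
  } finally {
    pendingRefreshSketch = null;
  }
}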
+ * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; +_BaseExternalAccountClient_pendingAccessToken = new WeakMap(), _BaseExternalAccountClient_instances = new WeakSet(), _BaseExternalAccountClient_internalRefreshAccessTokenAsync = async function _BaseExternalAccountClient_internalRefreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
+ return this.cachedAccessToken; +}; + + +/***/ }), + +/***/ 977: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const gcpMetadata = __nccwpck_require__(3046); +const oauth2client_1 = __nccwpck_require__(91); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. 
This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; + + +/***/ }), + +/***/ 9157: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultAwsSecurityCredentialsSupplier = void 0; +/** + * Internal AWS security credentials supplier implementation used by {@link AwsClient} + * when a credential source is provided instead of a user defined supplier. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + */ +class DefaultAwsSecurityCredentialsSupplier { + /** + * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information + * from the credential_source stored in the ADC file. + * @param opts The default aws security credentials supplier options object to + * build the supplier with. 
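// A hedged sketch of the metadata lookup the Compute client's
// refreshTokenNoCache() above performs: the service account's access token
// lives under the instance path below, optionally narrowed by a comma-separated
// scopes parameter. Uses the standalone gcp-metadata npm package as a stand-in
// for bundled module 3046; sketchComputeToken is illustrative only.
const gcpMetadataSketch = require('gcp-metadata');
async function sketchComputeToken(serviceAccountEmail = 'default', scopes = []) {
  return gcpMetadataSketch.instance({
    property: `service-accounts/${serviceAccountEmail}/token`,
    params: scopes.length ? { scopes: scopes.join(',') } : {},
  });
}
// sketchComputeToken().then(t => console.log(t.access_token, t.expires_in));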
+ */ + constructor(opts) { + _DefaultAwsSecurityCredentialsSupplier_instances.add(this); + this.regionUrl = opts.regionUrl; + this.securityCredentialsUrl = opts.securityCredentialsUrl; + this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Returns the active AWS region. This first checks to see if the region + * is available as an environment variable. If it is not, then the supplier + * will call the region URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS region string. + */ + async getAwsRegion(context) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + } + const metadataHeaders = {}; + if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: metadataHeaders, + }; + const response = await context.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * Returns AWS security credentials. This first checks to see if the credentials + * is available as environment variables. If it is not, then the supplier + * will call the security credentials URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS security credentials. + */ + async getAwsSecurityCredentials(context) { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + } + const metadataHeaders = {}; + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + // Since the role on a VM can change, we don't need to cache it. 
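+ // The current role name is read from the metadata server on every call and then
+ // exchanged below for temporary credentials.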
+ const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + } +} +exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; +_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = +/** + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the IMDSv2 Session Token. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = +/** + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = +/** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ +async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { + const response = await transporter.request({ + ...this.additionalGaxiosOptions, + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. 
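+ // AWS_REGION takes precedence over AWS_DEFAULT_REGION when both are set.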
+ return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); +}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; +}; + + +/***/ }), + +/***/ 7556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. 
+ * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. 
+ * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? 
new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; + + +/***/ }), + +/***/ 963: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCPEnv = void 0; +exports.clear = clear; +exports.getEnv = getEnv; +const gcpMetadata = __nccwpck_require__(3046); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. 
+ * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. + */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} + + +/***/ }), + +/***/ 3247: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. 
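+ // SAML responses carry the subject token in 'saml_response'; OIDC responses carry it in 'id_token'.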
+ if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; + + +/***/ }), + +/***/ 4240: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(4810); +const oauth2common_1 = __nccwpck_require__(6653); +const gaxios_1 = __nccwpck_require__(7003); +const stream = __nccwpck_require__(2203); +const baseexternalclient_1 = __nccwpck_require__(142); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; + + +/***/ }), + +/***/ 8323: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
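+/*
+ * A minimal usage sketch for the ExternalAccountAuthorizedUserClient defined in module 4240
+ * above, assuming the class is re-exported from the package entry point; all values below are
+ * placeholders, not real credentials.
+ *
+ * const { ExternalAccountAuthorizedUserClient } = require('google-auth-library');
+ * const client = new ExternalAccountAuthorizedUserClient({
+ * type: 'external_account_authorized_user',
+ * client_id: 'CLIENT_ID_PLACEHOLDER',
+ * client_secret: 'CLIENT_SECRET_PLACEHOLDER',
+ * refresh_token: 'REFRESH_TOKEN_PLACEHOLDER',
+ * });
+ * // Inside an async function: exchanges the refresh token at the configured token_url
+ * // and caches the resulting access token until it nears expiry.
+ * const headers = await client.getRequestHeaders(); // { Authorization: 'Bearer ...' }
+ */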
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const identitypoolclient_1 = __nccwpck_require__(9960); +const awsclient_1 = __nccwpck_require__(1261); +const pluggable_auth_client_1 = __nccwpck_require__(6077); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } + } +} +exports.ExternalAccountClient = ExternalAccountClient; + + +/***/ }), + +/***/ 932: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var _a, _b, _c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileSubjectTokenSupplier = void 0; +const util_1 = __nccwpck_require__(9023); +const fs = __nccwpck_require__(9896); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? 
_b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Internal subject token supplier implementation used when a file location + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class FileSubjectTokenSupplier { + /** + * Instantiates a new file based subject token supplier. + * @param opts The file subject token supplier options to build the supplier + * with. + */ + constructor(opts) { + this.filePath = opts.filePath; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + } + /** + * Returns the subject token stored at the file specified in the constructor. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + let parsedFilePath = this.filePath; + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + parsedFilePath = await realpath(parsedFilePath); + if (!(await lstat(parsedFilePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); + if (this.formatType === 'text') { + subjectToken = rawText; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } +} +exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; + + +/***/ }), + +/***/ 5934: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleAuth_instances, _GoogleAuth_pendingAuthClient, _GoogleAuth_prepareAndCacheClient, _GoogleAuth_determineClient; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.GoogleAuthExceptionMessages = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +const gcpMetadata = __nccwpck_require__(3046); +const os = __nccwpck_require__(857); +const path = __nccwpck_require__(6928); +const crypto_1 = __nccwpck_require__(8851); +const transporters_1 = __nccwpck_require__(7633); +const computeclient_1 = __nccwpck_require__(977); +const idtokenclient_1 = __nccwpck_require__(2718); +const envDetect_1 = __nccwpck_require__(963); +const jwtclient_1 = __nccwpck_require__(5277); +const refreshclient_1 = __nccwpck_require__(9807); +const impersonated_1 = __nccwpck_require__(9964); +const externalclient_1 = __nccwpck_require__(8323); +const baseexternalclient_1 = __nccwpck_require__(142); +const authclient_1 = __nccwpck_require__(4810); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(4240); +const util_1 = __nccwpck_require__(7870); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +exports.GoogleAuthExceptionMessages = { + API_KEY_WITH_CREDENTIALS: 'API Keys and Credentials are mutually exclusive authentication methods and cannot be used together.', + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_ADC_FOUND: 'Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisfy unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. 
+ * + * @param opts + */ + constructor(opts = {}) { + _GoogleAuth_instances.add(this); + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + /** + * A pending {@link AuthClient}. Used for concurrent {@link GoogleAuth.getClient} calls. + */ + _GoogleAuth_pendingAuthClient.set(this, null); + this.clientOptions = {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.clientOptions = opts.clientOptions || {}; + this.jsonContent = opts.credentials || null; + this.apiKey = opts.apiKey || this.clientOptions.apiKey || null; + // Cannot use both API Key + Credentials + if (this.apiKey && (this.jsonContent || this.clientOptions.credentials)) { + throw new RangeError(exports.GoogleAuthExceptionMessages.API_KEY_WITH_CREDENTIALS); + } + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } + } + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /** + * A private method for finding and caching a projectId. 
+ * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } + } + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe-domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. 
+ if (this.cachedCredential) { + // cache, while preserving existing quota project preferences + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, this.cachedCredential, null); + } + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + options.scopes = this.getAnyScopes(); + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, new computeclient_1.Compute(options)); + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_ADC_FOUND); + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. 
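+ // The well-known ADC file is <config root>/gcloud/application_default_credentials.json.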
+ if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; + } + /** + * Attempts to load default credentials from a file at the given path. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); + } + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + const sourceClient = this.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extract service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?<target>[^/]+):(generateAccessToken|generateIdToken)$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + return new impersonated_1.Impersonated({ + ...json, + sourceClient, + targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); + } + /** + * Create a credentials instance using the given input options. + * This client is not cached.
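// Sketch of the credential shape fromImpersonatedJSON() above accepts, and of
// the named-group regex it uses to pull the target principal out of the
// impersonation URL; the project and service-account names are placeholders.
const impersonatedJson = {
  type: 'impersonated_service_account',
  source_credentials: {/* a user or service-account credential object */},
  service_account_impersonation_url:
    'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/' +
    'target-sa@my-project.iam.gserviceaccount.com:generateAccessToken',
};

const match = /(?<target>[^/]+):(generateAccessToken|generateIdToken)$/.exec(
  impersonatedJson.service_account_impersonation_url);
console.log(match && match.groups.target); // target-sa@my-project.iam.gserviceaccount.com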
+ * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + * + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. 
+ this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + const chunks = []; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => chunks.push(chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(chunks.join('')); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * The created client is not cached. In order to create and cache it use the {@link GoogleAuth.getClient `getClient`} method after first providing an {@link GoogleAuth.apiKey `apiKey`}. + * + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options = {}) { + return new jwtclient_1.JWT({ ...options, apiKey }); + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; + } + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); + } + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. 
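// Usage sketch for fromStream() and fromAPIKey() as implemented above, assuming
// the public `google-auth-library` entry point; ./key.json and the API key
// string are hypothetical placeholders.
const fs = require('fs');
const {GoogleAuth} = require('google-auth-library');

async function fromStreamExample() {
  const auth = new GoogleAuth();
  // Parses the JSON key from the stream and caches the resulting client.
  const client = await auth.fromStream(fs.createReadStream('./key.json'));
  console.log('loaded', client.constructor.name);

  // fromAPIKey() returns an uncached JWT client that sends the key instead.
  const apiKeyClient = auth.fromAPIKey('my-hypothetical-api-key');
  console.log('api key client:', apiKeyClient.constructor.name);
}

fromStreamExample().catch(console.error);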
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } + } + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); + } + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; + } + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. 
+ */ + async getClient() { + if (this.cachedCredential) { + return this.cachedCredential; + } + // Use an existing auth client request, or cache a new one + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f") || __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_determineClient).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f"); + } + finally { + // reset the pending auth client in case it is changed later + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, null, "f"); + } + } + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); + } + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; + } + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); + } + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; + } + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
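// Sketch of the convenience request path implemented above: getClient() picks
// or builds a credential, and request()/getRequestHeaders() delegate to it.
// Assumes the public `google-auth-library` entry point; the URL is a placeholder.
const {GoogleAuth} = require('google-auth-library');

async function requestExample() {
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const projectId = await auth.getProjectId();
  const res = await auth.request({
    url: `https://cloudresourcemanager.googleapis.com/v1/projects/${projectId}`,
  });
  console.log(res.status);

  // Lower-level: fetch the headers and attach them to your own HTTP call.
  const headers = await auth.getRequestHeaders();
  console.log(Object.keys(headers)); // e.g. ['Authorization']
}

requestExample().catch(console.error);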
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; + } + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + retry: true, + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + return res.data.signedBlob; + } +} +exports.GoogleAuth = GoogleAuth; +_GoogleAuth_pendingAuthClient = new WeakMap(), _GoogleAuth_instances = new WeakSet(), _GoogleAuth_prepareAndCacheClient = async function _GoogleAuth_prepareAndCacheClient(credential, quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'] || null) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; +}, _GoogleAuth_determineClient = async function _GoogleAuth_determineClient() { + if (this.jsonContent) { + return this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + return await this.fromStreamAsync(stream, this.clientOptions); + } + else if (this.apiKey) { + const client = await this.fromAPIKey(this.apiKey, this.clientOptions); + client.scopes = this.scopes; + const { credential } = await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, client); + return credential; + } + else { + const { credential } = await this.getApplicationDefaultAsync(this.clientOptions); + return credential; + } +}; +/** + * Export DefaultTransporter as a static property of the class. + */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; + + +/***/ }), + +/***/ 7009: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. 
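// Sketch of the getIdTokenClient() and sign() helpers defined in the GoogleAuth
// class above, assuming the public `google-auth-library` entry point; the Cloud
// Run-style audience URL is a made-up placeholder.
const {GoogleAuth} = require('google-auth-library');

async function idTokenAndSignExample() {
  const auth = new GoogleAuth();
  const targetAudience = 'https://my-service-abc123-uc.a.run.app';
  const idClient = await auth.getIdTokenClient(targetAudience);
  const res = await idClient.request({url: targetAudience});
  console.log(res.status);

  // sign() uses a local private key when one is available; otherwise it calls
  // the IAM Credentials signBlob endpoint, as signBlob() above shows.
  const signature = await auth.sign('some data to sign');
  console.log(signature);
}

idTokenAndSignExample().catch(console.error);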
+ * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } +} +exports.IAMAuth = IAMAuth; + + +/***/ }), + +/***/ 9960: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdentityPoolClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const util_1 = __nccwpck_require__(7870); +const filesubjecttokensupplier_1 = __nccwpck_require__(932); +const urlsubjecttokensupplier_1 = __nccwpck_require__(4627); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const subjectTokenSupplier = opts.get('subject_token_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !subjectTokenSupplier) { + throw new Error('A credential source or subject token supplier must be specified.'); + } + if (credentialSource && subjectTokenSupplier) { + throw new Error('Only one of credential source or subject token supplier can be specified.'); + } + if (subjectTokenSupplier) { + this.subjectTokenSupplier = subjectTokenSupplier; + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. 
+ const formatType = formatOpts.get('type') || 'text'; + const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (formatType !== 'json' && formatType !== 'text') { + throw new Error(`Invalid credential_source format "${formatType}"`); + } + if (formatType === 'json' && !formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + const file = credentialSourceOpts.get('file'); + const url = credentialSourceOpts.get('url'); + const headers = credentialSourceOpts.get('headers'); + if (file && url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + else if (file && !url) { + this.credentialSourceType = 'file'; + this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ + filePath: file, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + }); + } + else if (!file && url) { + this.credentialSourceType = 'url'; + this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ + url: url, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + headers: headers, + additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, + }); + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + } + } + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. Gets a subject token by calling + * the configured {@link SubjectTokenSupplier} + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); + } +} +exports.IdentityPoolClient = IdentityPoolClient; + + +/***/ }), + +/***/ 2718: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(91); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
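// Sketch of a file-sourced external_account configuration of the kind the
// IdentityPoolClient constructor above validates; every identifier (project
// number, pool, provider, token path) is a made-up placeholder.
const {IdentityPoolClient} = require('google-auth-library');

const poolClient = new IdentityPoolClient({
  type: 'external_account',
  audience:
    '//iam.googleapis.com/projects/123456/locations/global/' +
    'workloadIdentityPools/my-pool/providers/my-provider',
  subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
  token_url: 'https://sts.googleapis.com/v1/token',
  credential_source: {
    file: '/var/run/secrets/tokens/k8s-token',
    format: {type: 'text'}, // or {type: 'json', subject_token_field_name: '...'}
  },
});
// poolClient.getAccessToken() would read the file and exchange it via STS.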
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } + } +} +exports.IdTokenClient = IdTokenClient; + + +/***/ }), + +/***/ 9964: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const gaxios_1 = __nccwpck_require__(7003); +const util_1 = __nccwpck_require__(7870); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. 
+ * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + const usingExplicitUniverseDomain = !!(0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (!usingExplicitUniverseDomain) { + // override the default universe with the source's universe + this.universeDomain = this.sourceClient.universeDomain; + } + else if (this.sourceClient.universeDomain !== this.universeDomain) { + // non-default universe and is not matching the source - this could be a credential leak + throw new RangeError(`Universe domain ${this.sourceClient.universeDomain} in source credentials does not match ${this.universeDomain} universe domain set for impersonated credentials.`); + } + this.endpoint = + (_f = options.endpoint) !== null && _f !== void 0 ? _f : `https://iamcredentials.${this.universeDomain}`; + } + /** + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * + * @returns A {@link SignBlobResponse} denoting the keyID and signedBlob in base64 string + */ + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. 
+ */ + async refreshToken() { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; + } + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a, _b; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + useEmailAzp: (_b = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _b !== void 0 ? _b : true, + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } +} +exports.Impersonated = Impersonated; + + +/***/ }), + +/***/ 7060: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
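// Usage sketch for the Impersonated client defined above, assuming the public
// `google-auth-library` entry point; the target service account is a made-up
// placeholder and the source credentials come from ADC.
const {GoogleAuth, Impersonated} = require('google-auth-library');

async function impersonationExample() {
  const auth = new GoogleAuth({
    scopes: 'https://www.googleapis.com/auth/cloud-platform',
  });
  const sourceClient = await auth.getClient();
  const impersonated = new Impersonated({
    sourceClient,
    targetPrincipal: 'target-sa@my-project.iam.gserviceaccount.com',
    targetScopes: ['https://www.googleapis.com/auth/cloud-platform'],
    lifetime: 3600, // seconds; the default shown in the constructor above
    delegates: [],
  });
  // refreshToken() above calls :generateAccessToken on the IAM Credentials API.
  const {token} = await impersonated.getAccessToken();
  console.log(Boolean(token));
}

impersonationExample().catch(console.error);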
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(3324); +const util_1 = __nccwpck_require__(7870); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? 
{ ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; + + +/***/ }), + +/***/ 5277: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(8568); +const jwtaccess_1 = __nccwpck_require__(7060); +const oauth2client_1 = __nccwpck_require__(91); +const authclient_1 = __nccwpck_require__(4810); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? 
optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); + } + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. 
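// Sketch of constructing the JWT service-account client defined above directly,
// assuming the public `google-auth-library` entry point; ./key.json is a
// hypothetical service-account key file and the URL is a placeholder.
const {JWT} = require('google-auth-library');
const keys = require('./key.json');

async function jwtExample() {
  const client = new JWT({
    email: keys.client_email,
    key: keys.private_key,
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  // With user scopes set, getRequestMetadataAsync() above exchanges a signed
  // assertion for an access token; omit scopes and pass a URL to get a
  // self-signed JWTAccess header instead.
  const headers = await client.getRequestHeaders(
    'https://www.googleapis.com/oauth2/v3/tokeninfo');
  console.log(Object.keys(headers)); // e.g. ['Authorization']
}

jwtExample().catch(console.error);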
+ */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + * + * @remarks + * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; + + +/***/ }), + +/***/ 3882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. 
+ * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; + + +/***/ }), + +/***/ 91: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuth2Client = exports.ClientAuthentication = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const stream = __nccwpck_require__(2203); +const formatEcdsa = __nccwpck_require__(325); +const crypto_1 = __nccwpck_require__(8851); +const authclient_1 = __nccwpck_require__(4810); +const loginticket_1 = __nccwpck_require__(3882); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +/** + * The client authentication type. Supported values are basic, post, and none. + * https://datatracker.ietf.org/doc/html/rfc7591#section-2 + */ +var ClientAuthentication; +(function (ClientAuthentication) { + ClientAuthentication["ClientSecretPost"] = "ClientSecretPost"; + ClientAuthentication["ClientSecretBasic"] = "ClientSecretBasic"; + ClientAuthentication["None"] = "None"; +})(ClientAuthentication || (exports.ClientAuthentication = ClientAuthentication = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? 
optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.clientAuthentication = + opts.clientAuthentication || ClientAuthentication.ClientSecretPost; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const values = { + client_id: options.client_id || this._clientId, + code_verifier: options.codeVerifier, + code: options.code, + grant_type: 'authorization_code', + redirect_uri: options.redirect_uri || this.redirectUri, + }; + if (this.clientAuthentication === ClientAuthentication.ClientSecretBasic) { + const basic = Buffer.from(`${this._clientId}:${this._clientSecret}`); + headers['Authorization'] = `Basic ${basic.toString('base64')}`; + } + if (this.clientAuthentication === ClientAuthentication.ClientSecretPost) { + values.client_secret = this._clientSecret; + } + const res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(values), + headers, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + ...OAuth2Client.RETRY_CONFIG, + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, reAuthRetried = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. 
+ // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. 
+ */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. 
+ * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if 
(!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; + + +/***/ }), + +/***/ 6653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuthClientAuthHandler = void 0; +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; +const querystring = __nccwpck_require__(3480); +const crypto_1 = __nccwpck_require__(8851); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. 
+ */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. 
+ */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} + + +/***/ }), + +/***/ 2045: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PassThroughClient = void 0; +const authclient_1 = __nccwpck_require__(4810); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + async request(opts) { + return this.transporter.request(opts); + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getAccessToken() { + return {}; + } + /** + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} + */ + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); + + +/***/ }), + +/***/ 6077: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const executable_response_1 = __nccwpck_require__(3247); +const pluggable_auth_handler_1 = __nccwpck_require__(908); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ *
+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration
+ *
+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(6077); +const executable_response_1 = __nccwpck_require__(3247); +const childProcess = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. 
+ */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. 
+ if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 9807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const querystring_1 = __nccwpck_require__(3480); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
+ */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + async fetchIdToken(targetAudience) { + const res = await this.transporter.request({ + ...UserRefreshClient.RETRY_CONFIG, + url: this.endpoints.oauth2TokenUrl, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + data: (0, querystring_1.stringify)({ + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + refresh_token: this._refreshToken, + target_audience: targetAudience, + }), + }); + return res.data.id_token; + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + static fromJSON(json) { + const client = new UserRefreshClient(); + client.fromJSON(json); + return client; + } +} +exports.UserRefreshClient = UserRefreshClient; + + +/***/ }), + +/***/ 121: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
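// A minimal usage sketch (illustrative only, not part of the generated bundle above):
// building a UserRefreshClient from an "authorized_user" JSON blob of the shape that
// fromJSON() validates, then requesting an access token via the inherited OAuth2Client
// surface. All credential values are placeholders.
const { UserRefreshClient } = require('google-auth-library');

const refreshClient = UserRefreshClient.fromJSON({
  type: 'authorized_user',                    // required by fromJSON()
  client_id: 'placeholder-client-id',
  client_secret: 'placeholder-client-secret',
  refresh_token: 'placeholder-refresh-token',
});

// getAccessToken() resolves with { token, res }; the refresh_token grant is issued lazily.
refreshClient.getAccessToken().then(({ token }) => {
  console.log('obtained access token:', Boolean(token));
});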
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const transporters_1 = __nccwpck_require__(7633); +const oauth2common_1 = __nccwpck_require__(6653); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + ...StsCredentials.RETRY_CONFIG, + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; + + +/***/ }), + +/***/ 4627: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UrlSubjectTokenSupplier = void 0; +/** + * Internal subject token supplier implementation used when a URL + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class UrlSubjectTokenSupplier { + /** + * Instantiates a URL subject token supplier. + * @param opts The URL subject token supplier options to build the supplier with. + */ + constructor(opts) { + this.url = opts.url; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + this.headers = opts.headers; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Sends a GET request to the URL provided in the constructor and resolves + * with the returned external subject token. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.url, + method: 'GET', + headers: this.headers, + responseType: this.formatType, + }; + let subjectToken; + if (this.formatType === 'text') { + const response = await context.transporter.request(opts); + subjectToken = response.data; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const response = await context.transporter.request(opts); + subjectToken = response.data[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; + + +/***/ }), + +/***/ 3438: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
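// Illustrative only (not part of the generated bundle above): the option names that
// StsCredentials.exchangeToken() maps onto the RFC 8693 form fields (grant_type,
// audience, subject_token, ...). The URNs are the standard token-exchange values;
// the audience and subject token below are placeholders.
const stsCredentialsOptions = {
  grantType: 'urn:ietf:params:oauth:grant-type:token-exchange',
  audience: '//iam.googleapis.com/projects/0/locations/global/workloadIdentityPools/pool/providers/provider',
  requestedTokenType: 'urn:ietf:params:oauth:token-type:access_token',
  subjectToken: 'placeholder-external-subject-token',
  subjectTokenType: 'urn:ietf:params:oauth:token-type:jwt',
  scope: ['https://www.googleapis.com/auth/cloud-platform'],
};
// A caller would then issue:
//   await stsCredentials.exchangeToken(stsCredentialsOptions);
// which POSTs the urlencoded fields to the configured token exchange endpoint.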
+/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(8793); +const crypto_1 = __nccwpck_require__(8851); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. 
+ const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; + + +/***/ }), + +/***/ 8851: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCrypto = createCrypto; +exports.hasBrowserCrypto = hasBrowserCrypto; +exports.fromArrayBufferToHex = fromArrayBufferToHex; +const crypto_1 = __nccwpck_require__(3438); +const crypto_2 = __nccwpck_require__(7388); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} + + +/***/ }), + +/***/ 7388: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6982); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + + +/***/ }), + +/***/ 492: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.ExecutableError = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsRequestSigner = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.ClientAuthentication = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(5934); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +// Export common deps to ensure types/instances are the exact match. Useful +// for consistently configuring the library across versions. +exports.gcpMetadata = __nccwpck_require__(3046); +exports.gaxios = __nccwpck_require__(7003); +var authclient_1 = __nccwpck_require__(4810); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(977); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(963); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(7009); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(2718); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(7060); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(5277); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(9964); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(91); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +Object.defineProperty(exports, "ClientAuthentication", ({ enumerable: true, get: function () { return oauth2client_1.ClientAuthentication; } })); +var loginticket_1 = __nccwpck_require__(3882); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(9807); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(1261); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var awsrequestsigner_1 = __nccwpck_require__(7647); +Object.defineProperty(exports, "AwsRequestSigner", ({ enumerable: true, get: function () { return awsrequestsigner_1.AwsRequestSigner; } })); +var identitypoolclient_1 = 
__nccwpck_require__(9960); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(8323); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(142); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(7556); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(6077); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +Object.defineProperty(exports, "ExecutableError", ({ enumerable: true, get: function () { return pluggable_auth_client_1.ExecutableError; } })); +var passthrough_1 = __nccwpck_require__(2045); +Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); +var transporters_1 = __nccwpck_require__(7633); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 8290: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validate = validate; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} + + +/***/ }), + +/***/ 7633: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const options_1 = __nccwpck_require__(8290); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = + `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } + } + return opts; + } + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + + +/***/ }), + +/***/ 7870: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LRUCache = void 0; +exports.snakeToCamel = snakeToCamel; +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; + } + return { get }; +} +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; + } + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. 
+ * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; + } +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; + + +/***/ }), + +/***/ 1628: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Colours = void 0; +/** + * Handles figuring out if we can use ANSI colours and handing out the escape codes. + * + * This is for package-internal use only, and may change at any time. + * + * @private + * @internal + */ +class Colours { + /** + * @param stream The stream (e.g. process.stderr) + * @returns true if the stream should have colourization enabled + */ + static isEnabled(stream) { + return (stream.isTTY && + (typeof stream.getColorDepth === 'function' + ? 
stream.getColorDepth() > 2 + : true)); + } + static refresh() { + Colours.enabled = Colours.isEnabled(process.stderr); + if (!this.enabled) { + Colours.reset = ''; + Colours.bright = ''; + Colours.dim = ''; + Colours.red = ''; + Colours.green = ''; + Colours.yellow = ''; + Colours.blue = ''; + Colours.magenta = ''; + Colours.cyan = ''; + Colours.white = ''; + Colours.grey = ''; + } + else { + Colours.reset = '\u001b[0m'; + Colours.bright = '\u001b[1m'; + Colours.dim = '\u001b[2m'; + Colours.red = '\u001b[31m'; + Colours.green = '\u001b[32m'; + Colours.yellow = '\u001b[33m'; + Colours.blue = '\u001b[34m'; + Colours.magenta = '\u001b[35m'; + Colours.cyan = '\u001b[36m'; + Colours.white = '\u001b[37m'; + Colours.grey = '\u001b[90m'; + } + } +} +exports.Colours = Colours; +Colours.enabled = false; +Colours.reset = ''; +Colours.bright = ''; +Colours.dim = ''; +Colours.red = ''; +Colours.green = ''; +Colours.yellow = ''; +Colours.blue = ''; +Colours.magenta = ''; +Colours.cyan = ''; +Colours.white = ''; +Colours.grey = ''; +Colours.refresh(); +//# sourceMappingURL=colours.js.map + +/***/ }), + +/***/ 1577: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(4788), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4788: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021-2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.env = exports.DebugLogBackendBase = exports.placeholder = exports.AdhocDebugLogger = exports.LogSeverity = void 0; +exports.getNodeBackend = getNodeBackend; +exports.getDebugBackend = getDebugBackend; +exports.getStructuredBackend = getStructuredBackend; +exports.setBackend = setBackend; +exports.log = log; +const node_events_1 = __nccwpck_require__(8474); +const process = __importStar(__nccwpck_require__(1708)); +const util = __importStar(__nccwpck_require__(7975)); +const colours_1 = __nccwpck_require__(1628); +// Some functions (as noted) are based on the Node standard library, from +// the following file: +// +// https://github.com/nodejs/node/blob/main/lib/internal/util/debuglog.js +/** + * This module defines an ad-hoc debug logger for Google Cloud Platform + * client libraries in Node. An ad-hoc debug logger is a tool which lets + * users use an external, unified interface (in this case, environment + * variables) to determine what logging they want to see at runtime. This + * isn't necessarily fed into the console, but is meant to be under the + * control of the user. The kind of logging that will be produced by this + * is more like "call retry happened", not "event you'd want to record + * in Cloud Logger". + * + * More for Googlers implementing libraries with it: + * go/cloud-client-logging-design + */ +/** + * Possible log levels. These are a subset of Cloud Observability levels. + * https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity + */ +var LogSeverity; +(function (LogSeverity) { + LogSeverity["DEFAULT"] = "DEFAULT"; + LogSeverity["DEBUG"] = "DEBUG"; + LogSeverity["INFO"] = "INFO"; + LogSeverity["WARNING"] = "WARNING"; + LogSeverity["ERROR"] = "ERROR"; +})(LogSeverity || (exports.LogSeverity = LogSeverity = {})); +/** + * Our logger instance. This actually contains the meat of dealing + * with log lines, including EventEmitter. This contains the function + * that will be passed back to users of the package. + */ +class AdhocDebugLogger extends node_events_1.EventEmitter { + /** + * @param upstream The backend will pass a function that will be + * called whenever our logger function is invoked. + */ + constructor(namespace, upstream) { + super(); + this.namespace = namespace; + this.upstream = upstream; + this.func = Object.assign(this.invoke.bind(this), { + // Also add an instance pointer back to us. + instance: this, + // And pull over the EventEmitter functionality. 
+ on: (event, listener) => this.on(event, listener), + }); + // Convenience methods for log levels. + this.func.debug = (...args) => this.invokeSeverity(LogSeverity.DEBUG, ...args); + this.func.info = (...args) => this.invokeSeverity(LogSeverity.INFO, ...args); + this.func.warn = (...args) => this.invokeSeverity(LogSeverity.WARNING, ...args); + this.func.error = (...args) => this.invokeSeverity(LogSeverity.ERROR, ...args); + this.func.sublog = (namespace) => log(namespace, this.func); + } + invoke(fields, ...args) { + // Push out any upstream logger first. + if (this.upstream) { + this.upstream(fields, ...args); + } + // Emit sink events. + this.emit('log', fields, args); + } + invokeSeverity(severity, ...args) { + this.invoke({ severity }, ...args); + } +} +exports.AdhocDebugLogger = AdhocDebugLogger; +/** + * This can be used in place of a real logger while waiting for Promises or disabling logging. + */ +exports.placeholder = new AdhocDebugLogger('', () => { }).func; +/** + * The base class for debug logging backends. It's possible to use this, but the + * same non-guarantees above still apply (unstable interface, etc). + * + * @private + * @internal + */ +class DebugLogBackendBase { + constructor() { + var _a; + this.cached = new Map(); + this.filters = []; + this.filtersSet = false; + // Look for the Node config variable for what systems to enable. We'll store + // these for the log method below, which will call setFilters() once. + let nodeFlag = (_a = process.env[exports.env.nodeEnables]) !== null && _a !== void 0 ? _a : '*'; + if (nodeFlag === 'all') { + nodeFlag = '*'; + } + this.filters = nodeFlag.split(','); + } + log(namespace, fields, ...args) { + try { + if (!this.filtersSet) { + this.setFilters(); + this.filtersSet = true; + } + let logger = this.cached.get(namespace); + if (!logger) { + logger = this.makeLogger(namespace); + this.cached.set(namespace, logger); + } + logger(fields, ...args); + } + catch (e) { + // Silently ignore all errors; we don't want them to interfere with + // the user's running app. + // e; + console.error(e); + } + } +} +exports.DebugLogBackendBase = DebugLogBackendBase; +// The basic backend. This one definitely works, but it's less feature-filled. +// +// Rather than using util.debuglog, this implements the same basic logic directly. +// The reason for this decision is that debuglog checks the value of the +// NODE_DEBUG environment variable before any user code runs; we therefore +// can't pipe our own enables into it (and util.debuglog will never print unless +// the user duplicates it into NODE_DEBUG, which isn't reasonable). +// +class NodeBackend extends DebugLogBackendBase { + constructor() { + super(...arguments); + // Default to allowing all systems, since we gate earlier based on whether the + // variable is empty. + this.enabledRegexp = /.*/g; + } + isEnabled(namespace) { + return this.enabledRegexp.test(namespace); + } + makeLogger(namespace) { + if (!this.enabledRegexp.test(namespace)) { + return () => { }; + } + return (fields, ...args) => { + var _a; + // TODO: `fields` needs to be turned into a string here, one way or another. 
+ const nscolour = `${colours_1.Colours.green}${namespace}${colours_1.Colours.reset}`; + const pid = `${colours_1.Colours.yellow}${process.pid}${colours_1.Colours.reset}`; + let level; + switch (fields.severity) { + case LogSeverity.ERROR: + level = `${colours_1.Colours.red}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.INFO: + level = `${colours_1.Colours.magenta}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.WARNING: + level = `${colours_1.Colours.yellow}${fields.severity}${colours_1.Colours.reset}`; + break; + default: + level = (_a = fields.severity) !== null && _a !== void 0 ? _a : LogSeverity.DEFAULT; + break; + } + const msg = util.formatWithOptions({ colors: colours_1.Colours.enabled }, ...args); + const filteredFields = Object.assign({}, fields); + delete filteredFields.severity; + const fieldsJson = Object.getOwnPropertyNames(filteredFields).length + ? JSON.stringify(filteredFields) + : ''; + const fieldsColour = fieldsJson + ? `${colours_1.Colours.grey}${fieldsJson}${colours_1.Colours.reset}` + : ''; + console.error('%s [%s|%s] %s%s', pid, nscolour, level, msg, fieldsJson ? ` ${fieldsColour}` : ''); + }; + } + // Regexp patterns below are from here: + // https://github.com/nodejs/node/blob/c0aebed4b3395bd65d54b18d1fd00f071002ac20/lib/internal/util/debuglog.js#L36 + setFilters() { + const totalFilters = this.filters.join(','); + const regexp = totalFilters + .replace(/[|\\{}()[\]^$+?.]/g, '\\$&') + .replace(/\*/g, '.*') + .replace(/,/g, '$|^'); + this.enabledRegexp = new RegExp(`^${regexp}$`, 'i'); + } +} +/** + * @returns A backend based on Node util.debuglog; this is the default. + */ +function getNodeBackend() { + return new NodeBackend(); +} +class DebugBackend extends DebugLogBackendBase { + constructor(pkg) { + super(); + this.debugPkg = pkg; + } + makeLogger(namespace) { + const debugLogger = this.debugPkg(namespace); + return (fields, ...args) => { + // TODO: `fields` needs to be turned into a string here. + debugLogger(args[0], ...args.slice(1)); + }; + } + setFilters() { + var _a; + const existingFilters = (_a = process.env['NODE_DEBUG']) !== null && _a !== void 0 ? _a : ''; + process.env['NODE_DEBUG'] = `${existingFilters}${existingFilters ? ',' : ''}${this.filters.join(',')}`; + } +} +/** + * Creates a "debug" package backend. The user must call require('debug') and pass + * the resulting object to this function. + * + * ``` + * setBackend(getDebugBackend(require('debug'))) + * ``` + * + * https://www.npmjs.com/package/debug + * + * Note: Google does not explicitly endorse or recommend this package; it's just + * being provided as an option. + * + * @returns A backend based on the npm "debug" package. + */ +function getDebugBackend(debugPkg) { + return new DebugBackend(debugPkg); +} +/** + * This pretty much works like the Node logger, but it outputs structured + * logging JSON matching Google Cloud's ingestion specs. Rather than handling + * its own output, it wraps another backend. The passed backend must be a subclass + * of `DebugLogBackendBase` (any of the backends exposed by this package will work). + */ +class StructuredBackend extends DebugLogBackendBase { + constructor(upstream) { + var _a; + super(); + this.upstream = (_a = upstream) !== null && _a !== void 0 ? _a : new NodeBackend(); + } + makeLogger(namespace) { + const debugLogger = this.upstream.makeLogger(namespace); + return (fields, ...args) => { + var _a; + const severity = (_a = fields.severity) !== null && _a !== void 0 ? 
_a : LogSeverity.INFO; + const json = Object.assign({ + severity, + message: util.format(...args), + }, fields); + const jsonString = JSON.stringify(json); + debugLogger(fields, jsonString); + }; + } + setFilters() { + this.upstream.setFilters(); + } +} +/** + * Creates a "structured logging" backend. This pretty much works like the + * Node logger, but it outputs structured logging JSON matching Google + * Cloud's ingestion specs instead of plain text. + * + * ``` + * setBackend(getStructuredBackend()) + * ``` + * + * @param upstream If you want to use something besides the Node backend to + * write the actual log lines into, pass that here. + * @returns A backend based on Google Cloud structured logging. + */ +function getStructuredBackend(upstream) { + return new StructuredBackend(upstream); +} +/** + * The environment variables that we standardized on, for all ad-hoc logging. + */ +exports.env = { + /** + * Filter wildcards specific to the Node syntax, and similar to the built-in + * utils.debuglog() environment variable. If missing, disables logging. + */ + nodeEnables: 'GOOGLE_SDK_NODE_LOGGING', +}; +// Keep a copy of all namespaced loggers so users can reliably .on() them. +// Note that these cached functions will need to deal with changes in the backend. +const loggerCache = new Map(); +// Our current global backend. This might be: +let cachedBackend = undefined; +/** + * Set the backend to use for our log output. + * - A backend object + * - null to disable logging + * - undefined for "nothing yet", defaults to the Node backend + * + * @param backend Results from one of the get*Backend() functions. + */ +function setBackend(backend) { + cachedBackend = backend; + loggerCache.clear(); +} +/** + * Creates a logging function. Multiple calls to this with the same namespace + * will produce the same logger, with the same event emitter hooks. + * + * Namespaces can be a simple string ("system" name), or a qualified string + * (system:subsystem), which can be used for filtering, or for "system:*". + * + * @param namespace The namespace, a descriptive text string. + * @returns A function you can call that works similar to console.log(). + */ +function log(namespace, parent) { + // If the enable flag isn't set, do nothing. + const enablesFlag = process.env[exports.env.nodeEnables]; + if (!enablesFlag) { + return exports.placeholder; + } + // This might happen mostly if the typings are dropped in a user's code, + // or if they're calling from JavaScript. + if (!namespace) { + return exports.placeholder; + } + // Handle sub-loggers. + if (parent) { + namespace = `${parent.instance.namespace}:${namespace}`; + } + // Reuse loggers so things like event sinks are persistent. + const existing = loggerCache.get(namespace); + if (existing) { + return existing.func; + } + // Do we have a backend yet? + if (cachedBackend === null) { + // Explicitly disabled. + return exports.placeholder; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. + cachedBackend = getNodeBackend(); + } + // The logger is further wrapped so we can handle the backend changing out. + const logger = (() => { + let previousBackend = undefined; + const newLogger = new AdhocDebugLogger(namespace, (fields, ...args) => { + if (previousBackend !== cachedBackend) { + // Did the user pass a custom backend? + if (cachedBackend === null) { + // Explicitly disabled. + return; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. 
+ cachedBackend = getNodeBackend(); + } + previousBackend = cachedBackend; + } + cachedBackend === null || cachedBackend === void 0 ? void 0 : cachedBackend.log(namespace, fields, ...args); + }); + return newLogger; + })(); + loggerCache.set(namespace, logger); + return logger.func; +} +//# sourceMappingURL=logging-utils.js.map + +/***/ }), + +/***/ 8568: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(9896); +const gaxios_1 = __nccwpck_require__(7003); +const jws = __nccwpck_require__(3324); +const path = __nccwpck_require__(6928); +const util_1 = __nccwpck_require__(9023); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. 
+ * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. 
+ */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 3813: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; + + +/***/ }), + +/***/ 7847: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const once_1 = __importDefault(__nccwpck_require__(8662)); +const agent_base_1 = __nccwpck_require__(2960); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
+ * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. 
+ if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 1970: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(7847)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2960: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(2600)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2600: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 3669: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const tls = __importStar(__nccwpck_require__(4756)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(8894); +const url_1 = __nccwpck_require__(7016); +const parse_proxy_response_1 = __nccwpck_require__(7943); +const debug = (0, debug_1.default)('https-proxy-agent'); +const setServernameFromNonIpHost = (options) => { + if (options.servername === undefined && + options.host && + !net.isIP(options.host)) { + return { + ...options, + servername: options.host, + }; + } + return options; +}; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? 
`[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + return tls.connect({ + ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'), + socket, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7943: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 9598: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(9023); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(6589); +} + + +/***/ }), + +/***/ 6589: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), + +/***/ 6543: +/***/ ((module) => { + +"use strict"; + + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + 
typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; + + +/***/ }), + +/***/ 4826: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var json_stringify = (__nccwpck_require__(3651).stringify); +var json_parse = __nccwpck_require__(3197); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; + + +/***/ }), + +/***/ 3197: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. 
+*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } + } + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. 
+ + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(1259); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. 
+ + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; + + +/***/ }), + +/***/ 3651: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = __nccwpck_require__(1259); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. 
If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. 
+ + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. 
The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. 
+ + return str('', {'': value}); + }; + } +}()); + + +/***/ }), + +/***/ 8622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var bufferEqual = __nccwpck_require__(9732); +var Buffer = (__nccwpck_require__(3058).Buffer); +var crypto = __nccwpck_require__(6982); +var formatEcdsa = __nccwpck_require__(325); +var util = __nccwpck_require__(9023); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function 
createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; + + +/***/ }), + +/***/ 3324: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(8600); +var VerifyStream = __nccwpck_require__(4368); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; 
+exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; + + +/***/ }), + +/***/ 1831: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module, process*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var Stream = __nccwpck_require__(2203); +var util = __nccwpck_require__(9023); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; + + +/***/ }), + +/***/ 8600: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + 
this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; + + +/***/ }), + +/***/ 5126: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(181).Buffer); + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; + + +/***/ }), + +/***/ 4368: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = 
jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; + + /***/ }), /***/ 9829: @@ -51564,6 +71403,137 @@ function populateMaps (extensions, types) { } +/***/ }), + +/***/ 4402: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? 
ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; + + +/***/ }), + +/***/ 4900: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +let Mime = __nccwpck_require__(4402); +module.exports = new Mime(__nccwpck_require__(3725), __nccwpck_require__(8548)); + + +/***/ }), + +/***/ 8548: +/***/ ((module) => { + +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["w
bs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"ap
plication/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.m
acroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/
vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-
diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql"
:["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.trans
mit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + +/***/ }), + +/***/ 3725: +/***/ ((module) => { + +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + /***/ }), /***/ 3772: @@ -52518,6 +72488,175 @@ function regExpEscape (s) { } +/***/ }), + +/***/ 744: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + /***/ }), /***/ 6705: @@ -53891,10 +74030,6 @@ function getNodeRequestOptions(request) { agent = agent(parsedURL); } - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - // HTTP-network fetch step 4.2 // chunked encoding is handled by Node.js @@ -53943,6 +74078,20 @@ const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); }; +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
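// Editor's note (illustrative sketch, not part of the bundled diff): usage of the `ms` helper
// defined in module 744 above. `require('ms')` assumes the standalone `ms` package that module
// is a copy of.
const ms = require('ms');
console.log(ms('2 days'));               // 172800000 -- strings are parsed to milliseconds
console.log(ms('1.5h'));                 // 5400000
console.log(ms(60000));                  // '1m'       -- numbers are formatted via fmtShort
console.log(ms(60000, { long: true }));  // '1 minute' -- the `long` option selects fmtLong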
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + /** * Fetch function * @@ -53974,7 +74123,7 @@ function fetch(url, opts) { let error = new AbortError('The user aborted a request.'); reject(error); if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); + destroyStream(request.body, error); } if (!response || !response.body) return; response.body.emit('error', error); @@ -54015,9 +74164,43 @@ function fetch(url, opts) { req.on('error', function (err) { reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + finalize(); }); + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + req.on('response', function (res) { clearTimeout(reqTimeout); @@ -54089,7 +74272,7 @@ function fetch(url, opts) { size: request.size }; - if (!isDomainOrSubdomain(request.url, locationURL)) { + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { requestOpts.headers.delete(name); } @@ -54182,6 +74365,13 @@ function fetch(url, opts) { response = new Response(body, response_options); resolve(response); }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
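// Editor's note (illustrative sketch, not part of the bundled diff): isSameProtocol() above only
// compares URL protocols; the local `sameProtocol` helper below is a hypothetical stand-in written
// for this example.
const { URL } = require('url');
const sameProtocol = (a, b) => new URL(a).protocol === new URL(b).protocol;
console.log(sameProtocol('https://example.com/a', 'https://example.com/b')); // true
console.log(sameProtocol('https://example.com/a', 'http://example.com/b'));  // false
// When the patched redirect check above sees a different protocol (or a non-same-site host), it
// deletes 'authorization', 'www-authenticate', 'cookie' and 'cookie2' before following the redirect.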
+ if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); return; } @@ -54201,6 +74391,44 @@ function fetch(url, opts) { writeToStream(req, request); }); } +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + /** * Redirect code matching * @@ -54221,6 +74449,7 @@ exports.Headers = Headers; exports.Request = Request; exports.Response = Response; exports.FetchError = FetchError; +exports.AbortError = AbortError; /***/ }), @@ -56385,6 +76614,3674 @@ module.exports.implForWrapper = function (wrapper) { +/***/ }), + +/***/ 5560: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(8264) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 5500: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of 
${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.F = codes; + + +/***/ }), + +/***/ 2063: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
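// Editor's note (illustrative sketch, not part of the bundled diff): the shape of the errors
// registered by createErrorType() in module 5500 above. `codes` refers to the registry that
// module exports as `.F`; the snippet is written as if in that module's scope.
const err = new codes.ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], 42);
console.log(err.message);               // 'The "chunk" argument must be one of type string, Buffer, or Uint8Array. Received type number'
console.log(err.code);                  // 'ERR_INVALID_ARG_TYPE'
console.log(err instanceof TypeError);  // true -- TypeError was passed as the Base class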
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + + + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = __nccwpck_require__(9274); +var Writable = __nccwpck_require__(8797); +__nccwpck_require__(9598)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 5283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +var Transform = __nccwpck_require__(2337); +__nccwpck_require__(9598)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 9274: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
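// Editor's note (illustrative sketch, not part of the bundled diff): the onend()/onEndNT() pair
// above is the "no-half-open enforcer" -- with allowHalfOpen: false the writable side is ended
// automatically once the readable side ends. Node's built-in 'stream' module stands in for the
// vendored Duplex here.
const { Duplex } = require('stream');
const d = new Duplex({
  allowHalfOpen: false,
  read() { this.push(null); },        // readable side ends immediately
  write(chunk, enc, cb) { cb(); }
});
d.on('finish', () => console.log('writable side ended by the no-half-open enforcer'));
d.resume();                            // start flowing so 'end' (and then end()) fires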
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = (__nccwpck_require__(4434).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = __nccwpck_require__(9023); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = __nccwpck_require__(7336); +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +__nccwpck_require__(9598)(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. 
+ if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
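+// A minimal usage sketch (assumed example, not part of this module): a
+// custom Readable calls push() from _read() and stops as soon as push()
+// returns false, i.e. once the buffer has reached the highWaterMark.
+//
+//   const { Readable } = require('stream');
+//   class Counter extends Readable {
+//     constructor(opts) { super(opts); this._n = 0; }
+//     _read() {
+//       while (this._n < 3) {
+//         if (!this.push(String(this._n++) + '\n')) return; // backpressure
+//       }
+//       this.push(null); // no more data
+//     }
+//   }
+//   new Counter().pipe(process.stdout);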
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(4868); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(4659); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} + +/***/ }), + +/***/ 2337: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. 
However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + + +module.exports = Transform; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = __nccwpck_require__(2063); +__nccwpck_require__(9598)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. 
If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} + +/***/ }), + +/***/ 8797: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
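+// Minimal usage sketch (assumed example, not part of this module):
+//
+//   const { Writable } = require('stream');
+//   const sink = new Writable({
+//     write(chunk, encoding, cb) {
+//       // pretend to persist the chunk, then signal completion
+//       setImmediate(cb);
+//     }
+//   });
+//   sink.write('hello');
+//   sink.end('world');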
+ + + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(4488) +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(9598)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. 
Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
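+// Sketch of the effect (assumed example): a Duplex from the same build
+// still passes `instanceof Writable`, because the Symbol.hasInstance hook
+// below falls back to checking for a _writableState:
+//   const d = new Duplex({ read() {}, write(c, e, cb) { cb(); } });
+//   d instanceof Writable; // true, although Duplex only extends Readable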
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
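+// Sketch of the behaviour enforced below (assumed example): outside object
+// mode, only strings, Buffers and Uint8Arrays can be written:
+//   writable.write(null); // errors with ERR_STREAM_NULL_VALUES
+//   writable.write(42);   // errors with ERR_INVALID_ARG_TYPE
+//   writable.write('ok'); // fine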
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
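+// Caller-side sketch of the contract described above (assumed example):
+// once write() returns false, stop writing and wait for 'drain'.
+//
+//   function writeMany(ws, chunks, done) {
+//     let i = 0;
+//     (function next() {
+//       while (i < chunks.length) {
+//         if (!ws.write(chunks[i++])) return ws.once('drain', next);
+//       }
+//       done();
+//     })();
+//   }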
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; + +/***/ }), + +/***/ 4868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var finished = __nccwpck_require__(6815); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; + +/***/ }), + +/***/ 7336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if 
(Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = __nccwpck_require__(181), + Buffer = _require.Buffer; +var _require2 = __nccwpck_require__(9023), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); + +/***/ }), + +/***/ 5089: +/***/ ((module) => { + +"use strict"; + + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
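+  // In practice: destroy(err) when autoDestroy is enabled on either side, otherwise emit 'error' directly.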
+ + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; + +/***/ }), + +/***/ 6815: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + + + +var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; + +/***/ }), + +/***/ 4659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function 
asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. 
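+  // (push() returning false flips `reading` back to false, so the next _read() call resumes iteration)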
+ var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; + + +/***/ }), + +/***/ 6701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + + + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(6815); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; + +/***/ }), + +/***/ 4874: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; + +/***/ }), + +/***/ 3283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(2203); + + +/***/ }), + +/***/ 6131: +/***/ ((module, exports, __nccwpck_require__) => { + +var Stream = __nccwpck_require__(2203); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(9274); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(8797); + exports.Duplex = __nccwpck_require__(2063); + exports.Transform = __nccwpck_require__(2337); + exports.PassThrough = __nccwpck_require__(5283); + exports.finished = __nccwpck_require__(6815); + exports.pipeline = __nccwpck_require__(6701); +} + + +/***/ }), + +/***/ 7842: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const {PassThrough} = __nccwpck_require__(2203); +const extend = __nccwpck_require__(3860); + +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} + +const DEFAULTS = { + objectMode: false, + retries: 2, + + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, + + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, + + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); + + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. 
+ return true; + } + } + }, +}; + +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } + + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + + if (typeof opts === 'function') { + callback = opts; + } + + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } + + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } + + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } + + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } + + function resetStreams() { + delayStream = null; + + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); + + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } + + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); + + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } + + if (streamMode) { + streamResponseHandled = false; + + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. + .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } + + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); + + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } + + setTimeout(makeRequest, nextRetryDelay); + } + + function onResponse(err, response, body) { + // An error such as DNS resolution. 
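+    // (no response was received at all; such failures count against opts.noResponseRetries, not opts.retries)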
+ if (err) { + numNoResponseAttempts++; + + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; + } + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } + + // No more attempts need to be made, just continue on. + if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} + +module.exports = retryRequest; + +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} + +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; + + +/***/ }), + +/***/ 5546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(7084); + +/***/ }), + +/***/ 7084: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +var RetryOperation = __nccwpck_require__(9538); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? 
(Math.random() + 1) + : 1; + + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; + + +/***/ }), + +/***/ 9538: +/***/ ((module) => { + +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } + } + + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + this._timer.unref(); + } + + return true; +}; + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if 
(this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; + + +/***/ }), + +/***/ 3058: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + /***/ }), /***/ 2560: @@ -59607,6 +83504,2463 @@ function coerce (version, options) { } +/***/ }), + +/***/ 1546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var stubs = __nccwpck_require__(3897) + +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this + + var cfg = { + callthrough: true, + calls: 1 + } + + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream +} + +module.exports = StreamEvents + + +/***/ 
}), + +/***/ 4633: +/***/ ((module) => { + +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + var idx = state.bufferIndex || 0 + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { + return state.buffer[idx].length + } + } + + return state.length +} + + +/***/ }), + +/***/ 634: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var Buffer = (__nccwpck_require__(6132).Buffer); +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
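+// Usage sketch (illustrative only; assumes a Node Buffer): a multi-byte UTF-8
+// sequence split across two writes is buffered until it can be completed.
+//   const sd = new StringDecoder('utf8');
+//   sd.write(Buffer.from([0xe2, 0x82])); // ''  -- first two bytes of '€' held back
+//   sd.write(Buffer.from([0xac]));       // '€' -- sequence completed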
+exports.I = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
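+// For example, with lastNeed === 2 and an incoming buffer starting with 0x41 ('A',
+// not a 10xxxxxx continuation byte), a single '\ufffd' is returned in place of the
+// partial sequence buffered so far.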
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} + +/***/ }), + +/***/ 6132: +/***/ ((module, exports, __nccwpck_require__) => { + +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 6496: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if(str==="0") return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. 
+ if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; + +/***/ }), + +/***/ 3897: +/***/ ((module) => { + +"use strict"; + + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} + } + + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached + + return returnVal + } +} + + +/***/ }), + +/***/ 1450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const os = __nccwpck_require__(857); +const tty = __nccwpck_require__(2018); +const hasFlag = __nccwpck_require__(3813); + +const {env} = process; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} + +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else 
{ + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; + + +/***/ }), + +/***/ 7745: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. 
+ * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); + } + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map + +/***/ }), + +/***/ 4003: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(8611); +const https_1 = __nccwpck_require__(5692); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(7016); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; + } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? 
__nccwpck_require__(1970) + : __nccwpck_require__(6518); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map + +/***/ }), + +/***/ 321: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(6705); +const stream_1 = __nccwpck_require__(2203); +const uuid = __nccwpck_require__(8993); +const agents_1 = __nccwpck_require__(4003); +const TeenyStatistics_1 = __nccwpck_require__(7745); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(1546); +class RequestError extends Error { +} +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(3480); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; +} +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7954: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(4446)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4446: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 5299: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(7954); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(7742)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. 
+ let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. + let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 6518: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(5299)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7742: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 8993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + 
return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(4684)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(3142)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(9655)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(880)); + +var _nil = _interopRequireDefault(__nccwpck_require__(194)); + +var _version = _interopRequireDefault(__nccwpck_require__(6257)); + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1400)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 8061: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7966: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7814: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 1118: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 8540: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 1352: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1400: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 4684: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 3142: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _md = _interopRequireDefault(__nccwpck_require__(8061)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 4575: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(1400); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 9655: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(7966)); + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _sha = _interopRequireDefault(__nccwpck_require__(1352)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 1579: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(1118)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6257: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + /***/ }), /***/ 770: @@ -59887,6 +86241,705 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { exports.debug = debug; // for test +/***/ }), + +/***/ 4488: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = __nccwpck_require__(9023).deprecate; + + +/***/ }), + +/***/ 2048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6415)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(1697)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(4676)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9771)); + +var _nil = _interopRequireDefault(__nccwpck_require__(7723)); + +var _version = _interopRequireDefault(__nccwpck_require__(5868)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 216: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7723: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 7879: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 2973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 7597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6415: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 1697: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _md = _interopRequireDefault(__nccwpck_require__(216)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2930: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _sha = _interopRequireDefault(__nccwpck_require__(507)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6200: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(7879)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 5868: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 8264: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + /***/ }), /***/ 8736: @@ -64894,6 +91947,81 @@ exports.debug = debug; // for test }).call(this); +/***/ }), + +/***/ 538: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. 
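+        // Until the class fields sketched above (`/// value;`, `/// next;`) can be
+        // declared directly, `next` is initialized explicitly so every node carries
+        // the link that the Queue's enqueue()/dequeue() below rely on.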
+ this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. + // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + /***/ }), /***/ 7242: @@ -64911,25 +92039,27 @@ var Inputs; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; - Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action -})(Inputs = exports.Inputs || (exports.Inputs = {})); + Inputs["LookupOnly"] = "lookup-only"; + Inputs["GCSBucket"] = "gcs-bucket"; + Inputs["GCSPathPrefix"] = "gcs-path-prefix"; // Input for cache, restore, save action +})(Inputs || (exports.Inputs = Inputs = {})); var Outputs; (function (Outputs) { Outputs["CacheHit"] = "cache-hit"; Outputs["CachePrimaryKey"] = "cache-primary-key"; Outputs["CacheMatchedKey"] = "cache-matched-key"; // Output from restore action -})(Outputs = exports.Outputs || (exports.Outputs = {})); +})(Outputs || (exports.Outputs = Outputs = {})); var State; (function (State) { State["CachePrimaryKey"] = "CACHE_KEY"; State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); +})(State || (exports.State = State = {})); var Events; (function (Events) { Events["Key"] = "GITHUB_EVENT_NAME"; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; -})(Events = exports.Events || (exports.Events = {})); +})(Events || (exports.Events = Events = {})); exports.RefKey = "GITHUB_REF"; @@ -64956,13 +92086,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -64973,8 +92113,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0; -const cache = __importStar(__nccwpck_require__(5116)); +exports.restoreImpl = restoreImpl; +exports.restoreOnlyRun = restoreOnlyRun; +exports.restoreRun = restoreRun; +const cache = __importStar(__nccwpck_require__(9614)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); const stateProvider_1 = __nccwpck_require__(2879); @@ -65033,7 +92175,6 @@ function restoreImpl(stateProvider, earlyExit) { } }); } -exports.restoreImpl = restoreImpl; function run(stateProvider, earlyExit) { return __awaiter(this, void 0, void 0, function* () { yield restoreImpl(stateProvider, earlyExit); @@ -65052,13 +92193,11 @@ function restoreOnlyRun(earlyExit) { yield run(new stateProvider_1.NullStateProvider(), earlyExit); }); } -exports.restoreOnlyRun = restoreOnlyRun; function restoreRun(earlyExit) { return __awaiter(this, void 0, void 0, function* () { yield run(new stateProvider_1.StateProvider(), earlyExit); }); } -exports.restoreRun = restoreRun; /***/ }), @@ -65084,13 +92223,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.NullStateProvider = exports.StateProvider = void 0; const core = __importStar(__nccwpck_require__(7484)); @@ -65159,15 +92308,33 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isGhes = isGhes; +exports.isExactKeyMatch = isExactKeyMatch; +exports.logWarning = logWarning; +exports.isValidEvent = isValidEvent; +exports.getInputAsArray = getInputAsArray; +exports.getInputAsInt = getInputAsInt; +exports.getInputAsBool = getInputAsBool; +exports.isGCSAvailable = isGCSAvailable; +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; const cache = __importStar(__nccwpck_require__(5116)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); @@ -65179,25 +92346,21 @@ function isGhes() { const isLocalHost = hostname.endsWith(".LOCALHOST"); return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; } -exports.isGhes = isGhes; function isExactKeyMatch(key, cacheKey) { return !!(cacheKey && cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0); } -exports.isExactKeyMatch = isExactKeyMatch; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); } -exports.logWarning = logWarning; // Cache token authorized for all events that are tied to a ref // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context function isValidEvent() { return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]); } -exports.isValidEvent = isValidEvent; function getInputAsArray(name, options) { return core .getInput(name, options) @@ -65205,7 +92368,6 @@ function getInputAsArray(name, options) { .map(s => s.replace(/^!\s+/, "!").trim()) .filter(x => x !== ""); } -exports.getInputAsArray = getInputAsArray; function getInputAsInt(name, options) { const value = parseInt(core.getInput(name, options)); if (isNaN(value) || value < 0) { @@ -65213,13 +92375,31 @@ function getInputAsInt(name, options) { } return value; } -exports.getInputAsInt = getInputAsInt; function getInputAsBool(name, options) { const result = core.getInput(name, options); return result.toLowerCase() === "true"; } -exports.getInputAsBool = getInputAsBool; +// Check if GCS is configured and available +function isGCSAvailable() { + try { + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + if (!bucket) { + core.info("GCS bucket name not provided, falling back to GitHub 
cache"); + return false; + } + return true; + } + catch (error) { + logWarning(`Failed to check GCS availability: ${error.message}`); + return false; + } +} function isCacheFeatureAvailable() { + // Check if GCS cache is available + if (isGCSAvailable()) { + return true; + } + // Otherwise, check GitHub cache if (cache.isFeatureAvailable()) { return true; } @@ -65231,7 +92411,242 @@ Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."); return false; } -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + + +/***/ }), + +/***/ 9614: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.restoreCache = restoreCache; +exports.saveCache = saveCache; +exports.isFeatureAvailable = isFeatureAvailable; +const core = __importStar(__nccwpck_require__(7484)); +const path = __importStar(__nccwpck_require__(6928)); +const constants_1 = __nccwpck_require__(7242); +const actionUtils_1 = __nccwpck_require__(8270); +const utils = __importStar(__nccwpck_require__(8299)); +const storage_1 = __nccwpck_require__(8525); +const cache = __importStar(__nccwpck_require__(5116)); +const tar_1 = __nccwpck_require__(5321); +const DEFAULT_PATH_PREFIX = "github-cache"; +// Function to initialize GCS client using Application Default Credentials +function getGCSClient() { + try { + core.info("Initializing GCS client"); + return new storage_1.Storage(); + } + catch (error) { + core.warning(`Failed to initialize GCS client: ${error.message}`); + return null; + } +} +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) { + return __awaiter(this, void 0, void 0, function* () { + // Check if GCS is available + if ((0, actionUtils_1.isGCSAvailable)()) { + try { + const result = yield restoreFromGCS(paths, primaryKey, restoreKeys, options); + if (result) { + core.info(`Cache restored from GCS with key: ${result}`); + return result; + } + core.info("Cache not found in GCS, falling back to GitHub cache"); + } + catch (error) { + core.warning(`Failed to restore from GCS: ${error.message}`); + core.info("Falling back to GitHub cache"); + } + } + // Fall back to GitHub cache + return yield cache.restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + }); +} +function saveCache(paths, key, options, enableCrossOsArchive) { + return __awaiter(this, void 0, void 0, function* () { + if ((0, actionUtils_1.isGCSAvailable)()) { + try { + const result = yield saveToGCS(paths, key); + if (result) { + core.info(`Cache saved to GCS with key: ${key}`); + return result; // Success ID + } + core.warning("Failed to save to GCS, falling back to GitHub cache"); + } + catch (error) { + core.warning(`Failed to save to GCS: ${error.message}`); + core.info("Falling back to GitHub cache"); + } + } + // Fall back to GitHub cache + return yield cache.saveCache(paths, key, options, enableCrossOsArchive); + }); +} +// Function that checks if the cache feature is available (either GCS or GitHub cache) +function isFeatureAvailable() { + return (0, actionUtils_1.isGCSAvailable)() || cache.isFeatureAvailable(); +} +function restoreFromGCS(_paths_1, primaryKey_1) { + return __awaiter(this, arguments, void 0, function* (_paths, // validate paths? 
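+    // `_paths` is accepted here but not read in the GCS branch below: the lookup is
+    // driven entirely by the candidate keys (primary key first, then restore keys),
+    // the configured bucket/prefix inputs, and the compression method.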
+ primaryKey, restoreKeys = [], options) { + const storage = getGCSClient(); + if (!storage) { + return undefined; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const keys = [primaryKey, ...restoreKeys]; + const gcsPath = yield findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod); + if (!gcsPath) { + core.info(`No matching cache found`); + return undefined; + } + // If lookup only, just return the key + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info(`Cache found in GCS with key: ${gcsPath}`); + return gcsPath; + } + try { + core.info(`Downloading from GCS: ${bucket}/${gcsPath}`); + const file = storage.bucket(bucket).file(gcsPath); + yield file.download({ destination: archivePath }); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + return gcsPath; + } + catch (error) { + core.warning(`Failed to restore: ${error.message}`); + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function getGCSPath(pathPrefix, key, compressionMethod) { + return `${pathPrefix}/${key}.${utils.getCacheFileName(compressionMethod)}`; +} +function saveToGCS(paths, key) { + return __awaiter(this, void 0, void 0, function* () { + let cacheId = -1; + const storage = getGCSClient(); + if (!storage) { + return cacheId; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + core.info(`Uploading to GCS: ${bucket}/${gcsPath}`); + yield storage.bucket(bucket).upload(archivePath, { + destination: gcsPath, + resumable: true, // this may not be necessary + }); + return 1; + } + catch (error) { + core.warning(`Error creating or uploading cache: ${error.message}`); + return -1; + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod) { + return __awaiter(this, void 0, void 
0, function* () { + for (const key of keys) { + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + if (yield checkFileExists(storage, bucket, gcsPath)) { + core.info(`Found file on bucket: ${bucket} with key: ${gcsPath}`); + return gcsPath; + } + } + return undefined; + }); +} +function checkFileExists(storage, bucket, path) { + return __awaiter(this, void 0, void 0, function* () { + const [exists] = yield storage.bucket(bucket).file(path).exists(); + return exists; + }); +} /***/ }), @@ -65316,6 +92731,30 @@ module.exports = require("net"); /***/ }), +/***/ 8474: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 1708: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:process"); + +/***/ }), + +/***/ 7975: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + /***/ 857: /***/ ((module) => { @@ -65340,6 +92779,14 @@ module.exports = require("punycode"); /***/ }), +/***/ 3480: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + /***/ 2203: /***/ ((module) => { @@ -65372,6 +92819,14 @@ module.exports = require("tls"); /***/ }), +/***/ 2018: +/***/ ((module) => { + +"use strict"; +module.exports = require("tty"); + +/***/ }), + /***/ 7016: /***/ ((module) => { @@ -65394,6 +92849,14404 @@ module.exports = require("util"); "use strict"; module.exports = require("zlib"); +/***/ }), + +/***/ 6637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = __nccwpck_require__(206); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. 
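+ * Each generated method is a thin wrapper around {@link Acl#add} or
+ * {@link Acl#delete} with the entity prefix and the OWNER role pre-filled
+ * (see `_assignAccessMethods` below).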
+ * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. 
ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. 
+ * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); + + +/***/ }), + +/***/ 2887: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const path = __importStar(__nccwpck_require__(6928)); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const util_1 = __nccwpck_require__(9023); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const util_js_1 = __nccwpck_require__(4557); +const acl_js_1 = __nccwpck_require__(6637); +const file_js_1 = __nccwpck_require__(9975); +const iam_js_1 = __nccwpck_require__(2880); +const notification_js_1 = __nccwpck_require__(8598); +const storage_js_1 = __nccwpck_require__(2848); +const signer_js_1 = __nccwpck_require__(7319); +const stream_1 = __nccwpck_require__(2203); +const url_1 = __nccwpck_require__(7016); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * 
A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). 
+ * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. 
+ */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. 
+ */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. 
+ */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. 
+ * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. 
+ this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime_1.default.getType(destinationFile.name) || undefined; + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + contentEncoding: destinationFile.metadata.contentEncoding, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. 
+ * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_js_1.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. 
+ * + * File preconditions cannot be passed to this function. It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. 
+ * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. + * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. + * See: https://cloud.google.com/storage/docs/managed-folders + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. 
+ * See: https://cloud.google.com/storage/docs/managed-folders + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which
+ * incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ *
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + if (query.fields && + query.autoPaginate && + !query.fields.includes('nextPageToken')) { + query.fields = `${query.fields},nextPageToken`; + } + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.fields) { + const fileInstance = file; + return fileInstance; + } + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URLs. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URLs. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + host: cfg.host, + signingEndpoint: cfg.signingEndpoint, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this, undefined, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accessible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {object} RestoreOptions Options for Bucket#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/restore#resource| Object resource}. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + */ + /** + * Restores a soft-deleted bucket + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [bucket] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return bucket; + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. 
+ * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. 
+ * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. + * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. 
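// --- Editorial sketch (not part of the patch): the storage-class name
// normalization performed by `setStorageClass` above. Hyphenated or camelCase
// input is converted to the UPPER_SNAKE_CASE form that is sent to the API.
const normalizeStorageClass = (storageClass) =>
  storageClass
    .replace(/-/g, '_')
    .replace(/([a-z])([A-Z])/g, (_, low, up) => low + '_' + up)
    .toUpperCase();
console.log(normalizeStorageClass('nearline'));                     // 'NEARLINE'
console.log(normalizeStorageClass('durableReducedAvailability'));   // 'DURABLE_REDUCED_AVAILABILITY'
console.log(normalizeStorageClass('durable-reduced-availability')); // 'DURABLE_REDUCED_AVAILABILITY'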
+ * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. + * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. 
+ * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. + * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. + const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? 
errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification', 'restore'], +}); + + +/***/ }), + +/***/ 2658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Channel = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. 
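The Bucket retry logic shown above (dropping `maxRetries` to 0 in `upload()` and flipping `autoRetry` off in `disableAutoRetryConditionallyIdempotent_`) is driven by the client-level `retryOptions.idempotencyStrategy` together with per-call preconditions. A minimal sketch, not part of the diff itself, of how a caller of the public `@google-cloud/storage` API that this bundle vendors might exercise that path; the bucket name and file path are placeholders:

```js
// Sketch only: assumes the public @google-cloud/storage API vendored by this bundle.
const {Storage, IdempotencyStrategy} = require('@google-cloud/storage');

async function main() {
  // RetryConditional (the default) only retries conditionally idempotent calls
  // when a precondition such as ifGenerationMatch is supplied.
  const storage = new Storage({
    retryOptions: {idempotencyStrategy: IdempotencyStrategy.RetryConditional},
  });
  const bucket = storage.bucket('my-bucket'); // placeholder bucket name

  // With ifGenerationMatch set, upload() keeps its retry budget; without it,
  // the bundled code above forces maxRetries to 0 for this call.
  await bucket.upload('local-image.png', {
    preconditionOpts: {ifGenerationMatch: 0}, // succeed only if the object does not exist yet
    onUploadProgress: evt => console.log('upload progress', evt), // wired to the write stream's 'progress' event
  });
}

main().catch(console.error);
```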
+ */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); + + +/***/ }), + +/***/ 4317: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = __nccwpck_require__(9896); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
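The `CRC32C` class above keeps the running checksum as a signed 32-bit integer and folds each input byte through `CRC32C_EXTENSION_TABLE`. A short usage sketch, assuming `CRC32C` is re-exported from the package entry point; the expected base64 strings come from the library's own doc comments (hashing `'data'` yields `'rth90Q=='`):

```js
// Sketch only: assumes CRC32C is exported by @google-cloud/storage.
const {CRC32C} = require('@google-cloud/storage');

const crc32c = new CRC32C();
crc32c.update(Buffer.from('data'));

console.log(crc32c.toString());           // 'rth90Q==' (base64 of the 4-byte big-endian value)
console.log(crc32c.validate('rth90Q==')); // true
console.log(crc32c.validate('DkjKuA==')); // false

// A validator can be rebuilt from a previously stored value.
const restored = CRC32C.from('rth90Q==');
console.log(restored.valueOf() === crc32c.valueOf()); // true
```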
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => { + if (typeof d === 'string') { + crc32c.update(Buffer.from(d)); + } + else { + crc32c.update(d); + } + }) + .on('end', () => resolve()) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; + + +/***/ }), + +/***/ 9975: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +const crypto = __importStar(__nccwpck_require__(6982)); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const resumableUpload = __importStar(__nccwpck_require__(8043)); +const stream_1 = __nccwpck_require__(2203); +const zlib = __importStar(__nccwpck_require__(3106)); +const storage_js_1 = __nccwpck_require__(2848); +const bucket_js_1 = __nccwpck_require__(2887); +const acl_js_1 = __nccwpck_require__(6637); +const signer_js_1 = __nccwpck_require__(7319); +const util_js_1 = __nccwpck_require__(7325); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +const util_js_2 = __nccwpck_require__(4557); +const crc32c_js_1 = __nccwpck_require__(4317); +const hash_stream_validator_js_1 = __nccwpck_require__(4873); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @deprecated - no longer used + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +/** + * @private + * This regex will match compressible content types. These are primarily text/*, +json, +text, +xml content types. + * This was based off of mime-db and may periodically need to be updated if new compressible content types become + * standards. 
+ */ +const COMPRESSIBLE_MIME_REGEX = new RegExp([ + /^text\/|application\/ecmascript|application\/javascript|application\/json/, + /|application\/postscript|application\/rtf|application\/toml|application\/vnd.dart/, + /|application\/vnd.ms-fontobject|application\/wasm|application\/x-httpd-php|application\/x-ns-proxy-autoconfig/, + /|application\/x-sh(?!ockwave-flash)|application\/x-tar|application\/x-virtualbox-hdd|application\/x-virtualbox-ova|application\/x-virtualbox-ovf/, + /|^application\/x-virtualbox-vbox$|application\/x-virtualbox-vdi|application\/x-virtualbox-vhd|application\/x-virtualbox-vmdk/, + /|application\/xml|application\/xml-dtd|font\/otf|font\/ttf|image\/bmp|image\/vnd.adobe.photoshop|image\/vnd.microsoft.icon/, + /|image\/vnd.ms-dds|image\/x-icon|image\/x-ms-bmp|message\/rfc822|model\/gltf-binary|\+json|\+text|\+xml|\+yaml/, +] + .map(r => r.source) + .join(''), 'i'); +class RequestError extends Error { +} +exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +const GS_UTIL_URL_REGEX = /(gs):\/\/([a-z0-9_.-]+)\/(.+)/g; +const HTTPS_PUBLIC_URL_REGEX = /(https):\/\/(storage\.googleapis\.com)\/([a-z0-9_.-]+)\/(.+)/g; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
+ * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. 
+ * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {number} [options.generation] The generation number to get + * @param {string} [options.restoreToken] If this is a soft-deleted object in an HNS-enabled bucket, returns the restore token which will + * be necessary to restore it if there's a name conflict with another object. + * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. + Object `generation` is required if `softDeleted` is set to True. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? 
+ if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. 
+ * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //

<h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
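The response handler above is what backs the `validation` option documented for `createReadStream`: the `x-goog-hash` header is split into its `crc32c=` and `md5=` parts and checked against the streamed bytes. A hedged sketch of requesting CRC32C validation and reacting to a mismatch; bucket, object, and local path are placeholders:

```js
// Sketch only: uses the createReadStream options documented above.
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('image.png'); // placeholder names

file
  .createReadStream({validation: 'crc32c'}) // CRC32C is the default; shown explicitly here
  .on('error', err => {
    if (err.code === 'CONTENT_DOWNLOAD_MISMATCH') {
      // The downloaded bytes did not hash to the value reported by the server;
      // the safest recourse is to download the file again.
    }
  })
  .pipe(fs.createWriteStream('/tmp/image.png'));
```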
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *

+ * <h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. 
+ * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *

+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

+ * // <h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

+ * // <h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

+ * // <h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //

+ * // <h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = COMPRESSIBLE_MIME_REGEX.test(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + // If the write stream, which is returned to the caller, catches an error we need to make sure that + // at least one of the streams in the pipeline below gets notified so that they + // all get cleaned up / destroyed. + writeStream.once('error', e => { + emitStream.destroy(e); + }); + // If the write stream is closed, cleanup the pipeline below by calling destroy on one of the streams. + writeStream.once('close', () => { + emitStream.destroy(); + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + receivedData = true; + // We know that the file exists the server - now we can truncate/write to a file + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', (err) => { + callback(err, Buffer.from('')); + }) + .on('finish', () => { + callback(null, data); + }); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * Gets a reference to a Cloud Storage {@link File} file from the provided URL in string format. + * @param {string} publicUrlOrGsUrl the URL as a string. Must be of the format gs://bucket/file + * or https://storage.googleapis.com/bucket/file. + * @param {Storage} storageInstance an instance of a Storage object. + * @param {FileOptions} [options] Configuration options + * @returns {File} + */ + static from(publicUrlOrGsUrl, storageInstance, options) { + const gsMatches = [...publicUrlOrGsUrl.matchAll(GS_UTIL_URL_REGEX)]; + const httpsMatches = [...publicUrlOrGsUrl.matchAll(HTTPS_PUBLIC_URL_REGEX)]; + if (gsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, gsMatches[0][2]); + return new File(bucket, gsMatches[0][3], options); + } + else if (httpsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, httpsMatches[0][3]); + return new File(bucket, httpsMatches[0][4], options); + } + else { + throw new Error('URL string must be of format gs://bucket/file or https://storage.googleapis.com/bucket/file'); + } + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + super + .get(options) + .then(resp => cb(null, ...resp)) + .catch(cb); + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. + */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). 
If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } 
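+ // min and max were validated as numbers above; record the allowed size
+ // window as a content-length-range condition in the policy document.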
+ conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. 
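+ * A minimal sketch of consuming the generated policy, assuming Node 18+
+ * globals (`fetch`, `FormData`, `Blob`) and reusing the `file` and `options`
+ * objects from the example below; the returned `fields` are posted as
+ * multipart form fields and the object contents go last under a `file` field.
+ *
+ * ```
+ * const [{url, fields}] = await file.generateSignedPostPolicyV4(options);
+ * const form = new FormData();
+ * for (const [name, value] of Object.entries(fields)) {
+ *   form.append(name, value);
+ * }
+ * // The object data must be the final field; it is conventionally named 'file'.
+ * form.append('file', new Blob(['my contents']), 'my-file');
+ * const res = await fetch(url, {method: 'POST', body: form});
+ * // A 2xx status (204 No Content by default) indicates the upload was accepted.
+ * ```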
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const universe = this.parent.storage.universeDomain; + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (this.storage.customEndpoint) { + url = this.storage.apiEndpoint; + } + else if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.${universe}/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `https://storage.${universe}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + 
}; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. + * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style + * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. 
Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + host: cfg.host, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveFileAtomicResponse + * @property {File} 0 The moved {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveFileAtomicCallback + * @param {?Error} err Request error, if any. + * @param {File} movedFile The moved {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveFileAtomicOptions Configuration options for File#moveFileAtomic(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {object} [preconditionOpts] Precondition options. + * @property {number} [preconditionOpts.ifGenerationMatch] Makes the operation conditional on whether the object's current generation matches the given value. + */ + /** + * Move this file within the same HNS-enabled bucket. + * The source object must exist and be a live object. + * The source and destination object IDs must be different. + * Overwriting the destination object is allowed by default, but can be prevented + * using preconditions. 
+ * If the destination path includes non-existent parent folders, they will be created. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/move| Objects: move API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|File} destination Destination file name or File object within the same bucket.. + * @param {MoveFileAtomicOptions} [options] Configuration options. See an + * @param {MoveFileAtomicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Assume 'my-hns-bucket' is an HNS-enabled bucket. + * //- + * const bucket = storage.bucket('my-hns-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.moveFileAtomic('moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "moved-image.png" + * + * // `movedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // Move the file to a subdirectory, creating parent folders if necessary. + * //- + * file.moveFileAtomic('new-folder/subfolder/moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "new-folder/subfolder/moved-image.png" + * }); + * + * //- + * // Prevent overwriting an existing destination object using preconditions. + * //- + * file.moveFileAtomic('existing-destination.png', { + * preconditionOpts: { + * ifGenerationMatch: 0 // Fails if the destination object exists. + * } + * }, function(err, movedFile, apiResponse) { + * if (err) { + * // Handle the error (e.g., the destination object already exists). + * } else { + * // Move successful. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.moveFileAtomic('moved-image.png).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file_hns + * Another example: + */ + moveFileAtomic(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destName = parsedDestination[2]; + } + else { + destName = destination; + } + } + else if (destination instanceof File) { + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + newFile = newFile || this.bucket.file(destName); + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? 
void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + const query = {}; + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/moveTo/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. 
+ * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#rename(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location).
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = storage.bucket('another-bucket').file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [restoreToken] Must be specified when getting a soft-deleted object from an HNS-enabled + * bucket that has a naming and generation conflict with another object in the same bucket. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response.
+ * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [file] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return file; + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature be disabled. + *

+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || + Buffer.isBuffer(data) || + data instanceof Uint8Array) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const cfg = { + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + let uploadStream; + try { + uploadStream = resumableUpload.upload(cfg); + } + catch (error) { + dup.destroy(error); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + return; + } + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + 'restore', + ], +}); + + +/***/ }), + +/***/ 4873: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashStreamValidator = void 0; +const crypto_1 = __nccwpck_require__(6982); +const stream_1 = __nccwpck_require__(2203); +const crc32c_js_1 = __nccwpck_require__(4317); +const file_js_1 = __nccwpck_require__(9975); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. 
+ let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); + + +/***/ }), + +/***/ 5891: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HmacKey = void 0; +const index_js_1 = __nccwpck_require__(1325); +const storage_js_1 = __nccwpck_require__(2848); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. 
+ * + * Note: this only creates a local reference to an HMAC key. To create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populates an HMAC key's metadata, and returns + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error.
+ * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populates an HMAC key's metadata, and returns + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned.
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); + + +/***/ }), + +/***/ 2880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = __nccwpck_require__(206); +const util_js_1 = __nccwpck_require__(4557); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). 
+ * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * that allow entering the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. + * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: getIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise.
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Iam); + + +/***/ }), + +/***/ 8525: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = __nccwpck_require__(1325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); +var storage_js_1 = __nccwpck_require__(2848); +Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); +Object.defineProperty(exports, "RETRYABLE_ERR_FN_DEFAULT", ({ enumerable: true, get: function () { return storage_js_1.RETRYABLE_ERR_FN_DEFAULT; } })); +Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); +var bucket_js_1 = __nccwpck_require__(2887); +Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); +__exportStar(__nccwpck_require__(4317), exports); +var channel_js_1 = __nccwpck_require__(2658); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); +var file_js_1 = __nccwpck_require__(9975); +Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); +__exportStar(__nccwpck_require__(4873), exports); +var hmacKey_js_1 = __nccwpck_require__(5891); +Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); +var iam_js_1 = __nccwpck_require__(2880); +Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); +var notification_js_1 = __nccwpck_require__(8598); +Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); +__exportStar(__nccwpck_require__(4), exports); + + +/***/ }), + +/***/ 1325: +/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; +var service_js_1 = __nccwpck_require__(8542); +Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); +var service_object_js_1 = __nccwpck_require__(2908); +Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); +var util_js_1 = __nccwpck_require__(7325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); +Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); + + +/***/ }), + +/***/ 2908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = __nccwpck_require__(206); +const events_1 = __nccwpck_require__(4434); +const util_js_1 = __nccwpck_require__(7325); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). 
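For reference, the entry-point JSDoc above documents the two supported ways to construct a Storage client (Application Default Credentials or an explicit key file) and the `storage.bucket(...)` accessor used in later examples. A minimal sketch combining those documented `@example` blocks; the project ID, key path, and bucket name are placeholders, not values from this patch:

```js
// Minimal usage sketch based on the @example blocks in the entry-point JSDoc.
// 'your-project-id', '/path/to/keyfile.json' and 'my-bucket' are placeholders.
const {Storage} = require('@google-cloud/storage');

// Option 1: rely on Application Default Credentials (ADC).
const storage = new Storage();

// Option 2: pass explicit credentials.
const storageWithKey = new Storage({
  projectId: 'your-project-id',
  keyFilename: '/path/to/keyfile.json',
});

// Bucket/File/Notification etc. are re-exported by this bundle's entry module.
const bucket = storage.bucket('my-bucket');
```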
+ this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); + + +/***/ }), + +/***/ 8542: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const google_auth_library_1 = __nccwpck_require__(492); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + this.universeDomain = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.customEndpoint = config.customEndpoint || false; + this.makeAuthenticatedRequest = util_js_1.util.makeAuthenticatedRequestFactory({ + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + clientOptions: { + universeDomain: options.universeDomain, + ...options.clientOptions, + }, + }); + this.authClient = this.makeAuthenticatedRequest.authClient; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. 
+ return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += + ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; + + +/***/ }), + +/***/ 7325: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = __nccwpck_require__(7635); +const htmlEntities = __importStar(__nccwpck_require__(3660)); +const google_auth_library_1 = __nccwpck_require__(492); +const retry_request_1 = __importDefault(__nccwpck_require__(7842)); +const stream_1 = __nccwpck_require__(2203); +const teeny_request_1 = __nccwpck_require__(321); +const uuid = __importStar(__nccwpck_require__(2048)); +const service_js_1 = __nccwpck_require__(8542); +const util_js_1 = __nccwpck_require__(4557); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. 
+ * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(htmlEntities.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. 
+ * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? 
void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. 
+ */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available + authClient = new google_auth_library_1.GoogleAuth({ + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + clientOptions: googleAutoAuthConfig.clientOptions, + }); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. 
+ return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? 
[{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; + + +/***/ }), + +/***/ 8598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. 
+ * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); + + +/***/ }), + +/***/ 8043: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Upload = exports.PROTOCOL_REGEX = void 0; +exports.upload = upload; +exports.createURI = createURI; +exports.checkUploadStatus = checkUploadStatus; +const abort_controller_1 = __importDefault(__nccwpck_require__(7413)); +const crypto_1 = __nccwpck_require__(6982); +const gaxios = __importStar(__nccwpck_require__(7003)); +const google_auth_library_1 = __nccwpck_require__(492); +const stream_1 = __nccwpck_require__(2203); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(4557); +const util_js_2 = __nccwpck_require__(7325); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + const universe = cfg.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.apiEndpoint = `https://storage.${universe}`; + if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + const hostname = new URL(this.apiEndpoint).hostname; + // check if it is a domain of a known universe + const isDomain = hostname === universe; + const isDefaultUniverseDomain = hostname === google_auth_library_1.DEFAULT_UNIVERSE; + // check if it is a subdomain of a known universe + // by checking a last (universe's length + 1) of a hostname + const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; + const isSubDomainOfDefaultUniverse = hostname.slice(-(google_auth_library_1.DEFAULT_UNIVERSE.length + 1)) === + `.${google_auth_library_1.DEFAULT_UNIVERSE}`; + if (!isDomain && + !isDefaultUniverseDomain && + !isSubDomainOfUniverse && + !isSubDomainOfDefaultUniverse) { + 
// a custom, non-universe domain, + // use gaxios + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + if (typeof cfg.key === 'string') { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + else { + const base64Key = cfg.key.toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
+ */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. 
+ * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. 
+ this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
+ const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. 
+ const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = + `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
+ this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${JSON.stringify(resp.data)}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error(`Retry limit exceeded - ${JSON.stringify(resp.data)}`)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} + + +/***/ }), + +/***/ 7319: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(__nccwpck_require__(6982)); +const url = __importStar(__nccwpck_require__(7016)); +const storage_js_1 = __nccwpck_require__(2848); +const util_js_1 = __nccwpck_require__(4557); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @deprecated - unused + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(auth, bucket, file, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage = new storage_js_1.Storage()) { + this.auth = auth; + this.bucket = bucket; + this.file = file; + this.storage = storage; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? 
(0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + var _a; + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + var _a; + const auth = this.auth; + try { + const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const credentials = await auth.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + var _a; + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? 
void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + extensionHeaders.host = fqdn.hostname; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + var _a; + const credentials = await this.auth.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. 
+ // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; + + +/***/ }), + +/***/ 2848: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const stream_1 = __nccwpck_require__(2203); +const bucket_js_1 = __nccwpck_require__(2887); +const channel_js_1 = __nccwpck_require__(2658); +const file_js_1 = __nccwpck_require__(9975); +const util_js_1 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const hmacKey_js_1 = __nccwpck_require__(5891); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. 
+ * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
<h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const universe = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + let apiEndpoint = `https://storage.${universe}`; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint && options.apiEndpoint !== apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? 
void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. 
+ * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. + * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {object} [hierarchicalNamespace.enabled=false] Specify whether or not to enable hierarchical namespace on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. 
+ * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. + * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + if (body.predefinedAcl) { + query.predefinedAcl = body.predefinedAcl; + delete body.predefinedAcl; + } + if (body.predefinedDefaultObjectAcl) { + query.predefinedDefaultObjectAcl = body.predefinedDefaultObjectAcl; + delete body.predefinedDefaultObjectAcl; + } + if (body.projection) { + query.projection = body.projection; + delete body.projection; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} 
[projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. + * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. 
+ * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + * @param {boolean} [softDeleted] If true, returns the soft-deleted object. + * Object `generation` is required if `softDeleted` is set to True. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. 
+ * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); + + +/***/ }), + +/***/ 4: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = __nccwpck_require__(9975); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const path = __importStar(__nccwpck_require__(6928)); +const fs_1 = __nccwpck_require__(9896); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +const fast_xml_parser_1 = __nccwpck_require__(9741); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const crypto_1 = __nccwpck_require__(6982); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. 
+ * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; + } +} +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. 
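+ * The XML response is expected to carry the new id in an
+ * `InitiateMultipartUploadResult.UploadId` element; the stored id is then
+ * reused by `uploadPart`, `completeUpload` and `abortUpload`.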
+ * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = + `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {Function} [customDestinationBuilder] A fuction that will take the current path of a local file + * and return a string representing a custom path to be used to upload the file to GCS. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = options.customDestinationBuilder + ? options.customDestinationBuilder(filePath, options) + : filePath.split(path.sep).join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * @property {boolean} [skipIfExists] Do not download the file if it already exists in + * the destination. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. 
Setting options.prefix or options.stripPrefix + * or options.passthroughOptions.destination will cause the downloaded files to be written to the file system + * instead of being returned as a buffer. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix || passThroughOptionsCopy.destination) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + if (options.skipIfExists && + (0, fs_1.existsSync)(passThroughOptionsCopy.destination || '')) { + continue; + } + promises.push(limit(async () => { + const destination = passThroughOptionsCopy.destination; + if (destination && destination.endsWith(path.sep)) { + await fs_1.promises.mkdir(destination, { recursive: true }); + return Promise.resolve([ + Buffer.alloc(0), + ]); + } + return file.download(passThroughOptionsCopy); + })); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = (0, p_limit_1.default)(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. 
+ * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. 
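+ // Because the read stream's highWaterMark equals the part size, each
+ // iteration below yields at most one part's worth of data. Once the queue
+ // reaches maxQueueSize we wait for the in-flight uploads to settle before
+ // reading further chunks; on a resumed upload the stream starts at
+ // partsMap.size * chunkSize so already-uploaded parts are skipped.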
+ for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} +exports.TransferManager = TransferManager; + + +/***/ }), + +/***/ 4557: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; + } + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function () { + var ownKeys = function (o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +}(); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.PassThroughShim = void 0; +exports.normalize = normalize; +exports.objectEntries = objectEntries; +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +exports.encodeURI = encodeURI; +exports.qsStringify = qsStringify; +exports.objectKeyToLowercase = objectKeyToLowercase; +exports.unicodeJSONStringify = unicodeJSONStringify; +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +exports.formatAsUTCISO = formatAsUTCISO; +exports.getRuntimeTrackingString = getRuntimeTrackingString; +exports.getUserAgentString = getUserAgentString; +exports.getDirName = getDirName; +exports.getModuleFormat = getModuleFormat; +const path = __importStar(__nccwpck_require__(6928)); +const querystring = __importStar(__nccwpck_require__(3480)); +const stream_1 = __nccwpck_require__(2203); +const url = __importStar(__nccwpck_require__(7016)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; +} +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? 
'%2F' : '/'); +} +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); +} +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. 
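+ * @example
+ * // e.g. 'gl-node/20.11.1' under Node.js, or 'gl-deno/1.40.0' under Deno
+ * // (version numbers shown are illustrative).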
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; + } else { + return `gl-node/${process.versions.node}`; + } +} +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. + */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; +} +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; + } + return dirToUse; +} +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; +} +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documentation, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events.
+ if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} +exports.PassThroughShim = PassThroughShim; + + +/***/ }), + +/***/ 3660: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encode = encode; +exports.decodeEntity = decodeEntity; +exports.decode = decode; +var named_references_js_1 = __nccwpck_require__(6000); +var numeric_unicode_map_js_1 = __nccwpck_require__(1057); +var surrogate_pairs_js_1 = __nccwpck_require__(9718); +var allNamedReferences = __assign(__assign({}, named_references_js_1.namedReferences), { all: named_references_js_1.namedReferences.html5 }); +var encodeRegExps = { + specialChars: /[<>'"&]/g, + nonAscii: /[<>'"&\u0080-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintable: /[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintableOnly: /[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + extensive: /[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g +}; +var defaultEncodeOptions = { + mode: 'specialChars', + level: 'all', + numeric: 'decimal' +}; +/** Encodes all the necessary (specified by `level`) characters in the text */ +function encode(text, _a) { + var _b = _a === void 0 ? defaultEncodeOptions : _a, _c = _b.mode, mode = _c === void 0 ? 'specialChars' : _c, _d = _b.numeric, numeric = _d === void 0 ? 'decimal' : _d, _e = _b.level, level = _e === void 0 ? 'all' : _e; + if (!text) { + return ''; + } + var encodeRegExp = encodeRegExps[mode]; + var references = allNamedReferences[level].characters; + var isHex = numeric === 'hexadecimal'; + return String.prototype.replace.call(text, encodeRegExp, function (input) { + var result = references[input]; + if (!result) { + var code = input.length > 1 ? (0, surrogate_pairs_js_1.getCodePoint)(input, 0) : input.charCodeAt(0); + result = (isHex ? 
'&#x' + code.toString(16) : '&#' + code) + ';'; + } + return result; + }); +} +var defaultDecodeOptions = { + scope: 'body', + level: 'all' +}; +var strict = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g; +var attribute = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g; +var baseDecodeRegExps = { + xml: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.xml + }, + html4: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html4 + }, + html5: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html5 + } +}; +var decodeRegExps = __assign(__assign({}, baseDecodeRegExps), { all: baseDecodeRegExps.html5 }); +var fromCharCode = String.fromCharCode; +var outOfBoundsChar = fromCharCode(65533); +var defaultDecodeEntityOptions = { + level: 'all' +}; +function getDecodedEntity(entity, references, isAttribute, isStrict) { + var decodeResult = entity; + var decodeEntityLastChar = entity[entity.length - 1]; + if (isAttribute && decodeEntityLastChar === '=') { + decodeResult = entity; + } + else if (isStrict && decodeEntityLastChar !== ';') { + decodeResult = entity; + } + else { + var decodeResultByReference = references[entity]; + if (decodeResultByReference) { + decodeResult = decodeResultByReference; + } + else if (entity[0] === '&' && entity[1] === '#') { + var decodeSecondChar = entity[2]; + var decodeCode = decodeSecondChar == 'x' || decodeSecondChar == 'X' + ? parseInt(entity.substr(3), 16) + : parseInt(entity.substr(2)); + decodeResult = + decodeCode >= 0x10ffff + ? outOfBoundsChar + : decodeCode > 65535 + ? (0, surrogate_pairs_js_1.fromCodePoint)(decodeCode) + : fromCharCode(numeric_unicode_map_js_1.numericUnicodeMap[decodeCode] || decodeCode); + } + } + return decodeResult; +} +/** Decodes a single entity */ +function decodeEntity(entity, _a) { + var _b = _a === void 0 ? defaultDecodeEntityOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c; + if (!entity) { + return ''; + } + return getDecodedEntity(entity, allNamedReferences[level].entities, false, false); +} +/** Decodes all entities in the text */ +function decode(text, _a) { + var _b = _a === void 0 ? defaultDecodeOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c, _d = _b.scope, scope = _d === void 0 ? level === 'xml' ? 
'strict' : 'body' : _d; + if (!text) { + return ''; + } + var decodeRegExp = decodeRegExps[level][scope]; + var references = allNamedReferences[level].entities; + var isAttribute = scope === 'attribute'; + var isStrict = scope === 'strict'; + return text.replace(decodeRegExp, function (entity) { return getDecodedEntity(entity, references, isAttribute, isStrict); }); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6000: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.namedReferences = exports.bodyRegExps = void 0; +// This file is autogenerated by tools/process-named-references.ts +var pairDivider = "~"; +var blockDivider = "~~"; +function generateNamedReferences(input, prev) { + var entities = {}; + var characters = {}; + var blocks = input.split(blockDivider); + var isOptionalBlock = false; + for (var i = 0; blocks.length > i; i++) { + var entries = blocks[i].split(pairDivider); + for (var j = 0; j < entries.length; j += 2) { + var entity = entries[j]; + var character = entries[j + 1]; + var fullEntity = '&' + entity + ';'; + entities[fullEntity] = character; + if (isOptionalBlock) { + entities['&' + entity] = character; + } + characters[character] = fullEntity; + } + isOptionalBlock = true; + } + return prev ? + { entities: __assign(__assign({}, entities), prev.entities), characters: __assign(__assign({}, characters), prev.characters) } : + { entities: entities, characters: characters }; +} +exports.bodyRegExps = { + xml: /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html4: /∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html5: /·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g +}; +exports.namedReferences = {}; +exports.namedReferences.xml = generateNamedReferences("lt~<~gt~>~quot~\"~apos~'~amp~&"); +exports.namedReferences.html4 = 
generateNamedReferences("apos~'~OElig~Œ~oelig~œ~Scaron~Š~scaron~š~Yuml~Ÿ~circ~ˆ~tilde~˜~ensp~ ~emsp~ ~thinsp~ ~zwnj~‌~zwj~‍~lrm~‎~rlm~‏~ndash~–~mdash~—~lsquo~‘~rsquo~’~sbquo~‚~ldquo~“~rdquo~”~bdquo~„~dagger~†~Dagger~‡~permil~‰~lsaquo~‹~rsaquo~›~euro~€~fnof~ƒ~Alpha~Α~Beta~Β~Gamma~Γ~Delta~Δ~Epsilon~Ε~Zeta~Ζ~Eta~Η~Theta~Θ~Iota~Ι~Kappa~Κ~Lambda~Λ~Mu~Μ~Nu~Ν~Xi~Ξ~Omicron~Ο~Pi~Π~Rho~Ρ~Sigma~Σ~Tau~Τ~Upsilon~Υ~Phi~Φ~Chi~Χ~Psi~Ψ~Omega~Ω~alpha~α~beta~β~gamma~γ~delta~δ~epsilon~ε~zeta~ζ~eta~η~theta~θ~iota~ι~kappa~κ~lambda~λ~mu~μ~nu~ν~xi~ξ~omicron~ο~pi~π~rho~ρ~sigmaf~ς~sigma~σ~tau~τ~upsilon~υ~phi~φ~chi~χ~psi~ψ~omega~ω~thetasym~ϑ~upsih~ϒ~piv~ϖ~bull~•~hellip~…~prime~′~Prime~″~oline~‾~frasl~⁄~weierp~℘~image~ℑ~real~ℜ~trade~™~alefsym~ℵ~larr~←~uarr~↑~rarr~→~darr~↓~harr~↔~crarr~↵~lArr~⇐~uArr~⇑~rArr~⇒~dArr~⇓~hArr~⇔~forall~∀~part~∂~exist~∃~empty~∅~nabla~∇~isin~∈~notin~∉~ni~∋~prod~∏~sum~∑~minus~−~lowast~∗~radic~√~prop~∝~infin~∞~ang~∠~and~∧~or~∨~cap~∩~cup~∪~int~∫~there4~∴~sim~∼~cong~≅~asymp~≈~ne~≠~equiv~≡~le~≤~ge~≥~sub~⊂~sup~⊃~nsub~⊄~sube~⊆~supe~⊇~oplus~⊕~otimes~⊗~perp~⊥~sdot~⋅~lceil~⌈~rceil~⌉~lfloor~⌊~rfloor~⌋~lang~〈~rang~〉~loz~◊~spades~♠~clubs~♣~hearts~♥~diams~♦~~nbsp~ ~iexcl~¡~cent~¢~pound~£~curren~¤~yen~¥~brvbar~¦~sect~§~uml~¨~copy~©~ordf~ª~laquo~«~not~¬~shy~­~reg~®~macr~¯~deg~°~plusmn~±~sup2~²~sup3~³~acute~´~micro~µ~para~¶~middot~·~cedil~¸~sup1~¹~ordm~º~raquo~»~frac14~¼~frac12~½~frac34~¾~iquest~¿~Agrave~À~Aacute~Á~Acirc~Â~Atilde~Ã~Auml~Ä~Aring~Å~AElig~Æ~Ccedil~Ç~Egrave~È~Eacute~É~Ecirc~Ê~Euml~Ë~Igrave~Ì~Iacute~Í~Icirc~Î~Iuml~Ï~ETH~Ð~Ntilde~Ñ~Ograve~Ò~Oacute~Ó~Ocirc~Ô~Otilde~Õ~Ouml~Ö~times~×~Oslash~Ø~Ugrave~Ù~Uacute~Ú~Ucirc~Û~Uuml~Ü~Yacute~Ý~THORN~Þ~szlig~ß~agrave~à~aacute~á~acirc~â~atilde~ã~auml~ä~aring~å~aelig~æ~ccedil~ç~egrave~è~eacute~é~ecirc~ê~euml~ë~igrave~ì~iacute~í~icirc~î~iuml~ï~eth~ð~ntilde~ñ~ograve~ò~oacute~ó~ocirc~ô~otilde~õ~ouml~ö~divide~÷~oslash~ø~ugrave~ù~uacute~ú~ucirc~û~uuml~ü~yacute~ý~thorn~þ~yuml~ÿ~quot~\"~amp~&~lt~<~gt~>"); +exports.namedReferences.html5 = 
generateNamedReferences("Abreve~Ă~Acy~А~Afr~𝔄~Amacr~Ā~And~⩓~Aogon~Ą~Aopf~𝔸~ApplyFunction~⁡~Ascr~𝒜~Assign~≔~Backslash~∖~Barv~⫧~Barwed~⌆~Bcy~Б~Because~∵~Bernoullis~ℬ~Bfr~𝔅~Bopf~𝔹~Breve~˘~Bscr~ℬ~Bumpeq~≎~CHcy~Ч~Cacute~Ć~Cap~⋒~CapitalDifferentialD~ⅅ~Cayleys~ℭ~Ccaron~Č~Ccirc~Ĉ~Cconint~∰~Cdot~Ċ~Cedilla~¸~CenterDot~·~Cfr~ℭ~CircleDot~⊙~CircleMinus~⊖~CirclePlus~⊕~CircleTimes~⊗~ClockwiseContourIntegral~∲~CloseCurlyDoubleQuote~”~CloseCurlyQuote~’~Colon~∷~Colone~⩴~Congruent~≡~Conint~∯~ContourIntegral~∮~Copf~ℂ~Coproduct~∐~CounterClockwiseContourIntegral~∳~Cross~⨯~Cscr~𝒞~Cup~⋓~CupCap~≍~DD~ⅅ~DDotrahd~⤑~DJcy~Ђ~DScy~Ѕ~DZcy~Џ~Darr~↡~Dashv~⫤~Dcaron~Ď~Dcy~Д~Del~∇~Dfr~𝔇~DiacriticalAcute~´~DiacriticalDot~˙~DiacriticalDoubleAcute~˝~DiacriticalGrave~`~DiacriticalTilde~˜~Diamond~⋄~DifferentialD~ⅆ~Dopf~𝔻~Dot~¨~DotDot~⃜~DotEqual~≐~DoubleContourIntegral~∯~DoubleDot~¨~DoubleDownArrow~⇓~DoubleLeftArrow~⇐~DoubleLeftRightArrow~⇔~DoubleLeftTee~⫤~DoubleLongLeftArrow~⟸~DoubleLongLeftRightArrow~⟺~DoubleLongRightArrow~⟹~DoubleRightArrow~⇒~DoubleRightTee~⊨~DoubleUpArrow~⇑~DoubleUpDownArrow~⇕~DoubleVerticalBar~∥~DownArrow~↓~DownArrowBar~⤓~DownArrowUpArrow~⇵~DownBreve~̑~DownLeftRightVector~⥐~DownLeftTeeVector~⥞~DownLeftVector~↽~DownLeftVectorBar~⥖~DownRightTeeVector~⥟~DownRightVector~⇁~DownRightVectorBar~⥗~DownTee~⊤~DownTeeArrow~↧~Downarrow~⇓~Dscr~𝒟~Dstrok~Đ~ENG~Ŋ~Ecaron~Ě~Ecy~Э~Edot~Ė~Efr~𝔈~Element~∈~Emacr~Ē~EmptySmallSquare~◻~EmptyVerySmallSquare~▫~Eogon~Ę~Eopf~𝔼~Equal~⩵~EqualTilde~≂~Equilibrium~⇌~Escr~ℰ~Esim~⩳~Exists~∃~ExponentialE~ⅇ~Fcy~Ф~Ffr~𝔉~FilledSmallSquare~◼~FilledVerySmallSquare~▪~Fopf~𝔽~ForAll~∀~Fouriertrf~ℱ~Fscr~ℱ~GJcy~Ѓ~Gammad~Ϝ~Gbreve~Ğ~Gcedil~Ģ~Gcirc~Ĝ~Gcy~Г~Gdot~Ġ~Gfr~𝔊~Gg~⋙~Gopf~𝔾~GreaterEqual~≥~GreaterEqualLess~⋛~GreaterFullEqual~≧~GreaterGreater~⪢~GreaterLess~≷~GreaterSlantEqual~⩾~GreaterTilde~≳~Gscr~𝒢~Gt~≫~HARDcy~Ъ~Hacek~ˇ~Hat~^~Hcirc~Ĥ~Hfr~ℌ~HilbertSpace~ℋ~Hopf~ℍ~HorizontalLine~─~Hscr~ℋ~Hstrok~Ħ~HumpDownHump~≎~HumpEqual~≏~IEcy~Е~IJlig~IJ~IOcy~Ё~Icy~И~Idot~İ~Ifr~ℑ~Im~ℑ~Imacr~Ī~ImaginaryI~ⅈ~Implies~⇒~Int~∬~Integral~∫~Intersection~⋂~InvisibleComma~⁣~InvisibleTimes~⁢~Iogon~Į~Iopf~𝕀~Iscr~ℐ~Itilde~Ĩ~Iukcy~І~Jcirc~Ĵ~Jcy~Й~Jfr~𝔍~Jopf~𝕁~Jscr~𝒥~Jsercy~Ј~Jukcy~Є~KHcy~Х~KJcy~Ќ~Kcedil~Ķ~Kcy~К~Kfr~𝔎~Kopf~𝕂~Kscr~𝒦~LJcy~Љ~Lacute~Ĺ~Lang~⟪~Laplacetrf~ℒ~Larr~↞~Lcaron~Ľ~Lcedil~Ļ~Lcy~Л~LeftAngleBracket~⟨~LeftArrow~←~LeftArrowBar~⇤~LeftArrowRightArrow~⇆~LeftCeiling~⌈~LeftDoubleBracket~⟦~LeftDownTeeVector~⥡~LeftDownVector~⇃~LeftDownVectorBar~⥙~LeftFloor~⌊~LeftRightArrow~↔~LeftRightVector~⥎~LeftTee~⊣~LeftTeeArrow~↤~LeftTeeVector~⥚~LeftTriangle~⊲~LeftTriangleBar~⧏~LeftTriangleEqual~⊴~LeftUpDownVector~⥑~LeftUpTeeVector~⥠~LeftUpVector~↿~LeftUpVectorBar~⥘~LeftVector~↼~LeftVectorBar~⥒~Leftarrow~⇐~Leftrightarrow~⇔~LessEqualGreater~⋚~LessFullEqual~≦~LessGreater~≶~LessLess~⪡~LessSlantEqual~⩽~LessTilde~≲~Lfr~𝔏~Ll~⋘~Lleftarrow~⇚~Lmidot~Ŀ~LongLeftArrow~⟵~LongLeftRightArrow~⟷~LongRightArrow~⟶~Longleftarrow~⟸~Longleftrightarrow~⟺~Longrightarrow~⟹~Lopf~𝕃~LowerLeftArrow~↙~LowerRightArrow~↘~Lscr~ℒ~Lsh~↰~Lstrok~Ł~Lt~≪~Map~⤅~Mcy~М~MediumSpace~ ~Mellintrf~ℳ~Mfr~𝔐~MinusPlus~∓~Mopf~𝕄~Mscr~ℳ~NJcy~Њ~Nacute~Ń~Ncaron~Ň~Ncedil~Ņ~Ncy~Н~NegativeMediumSpace~​~NegativeThickSpace~​~NegativeThinSpace~​~NegativeVeryThinSpace~​~NestedGreaterGreater~≫~NestedLessLess~≪~NewLine~\n~Nfr~𝔑~NoBreak~⁠~NonBreakingSpace~ 
~Nopf~ℕ~Not~⫬~NotCongruent~≢~NotCupCap~≭~NotDoubleVerticalBar~∦~NotElement~∉~NotEqual~≠~NotEqualTilde~≂̸~NotExists~∄~NotGreater~≯~NotGreaterEqual~≱~NotGreaterFullEqual~≧̸~NotGreaterGreater~≫̸~NotGreaterLess~≹~NotGreaterSlantEqual~⩾̸~NotGreaterTilde~≵~NotHumpDownHump~≎̸~NotHumpEqual~≏̸~NotLeftTriangle~⋪~NotLeftTriangleBar~⧏̸~NotLeftTriangleEqual~⋬~NotLess~≮~NotLessEqual~≰~NotLessGreater~≸~NotLessLess~≪̸~NotLessSlantEqual~⩽̸~NotLessTilde~≴~NotNestedGreaterGreater~⪢̸~NotNestedLessLess~⪡̸~NotPrecedes~⊀~NotPrecedesEqual~⪯̸~NotPrecedesSlantEqual~⋠~NotReverseElement~∌~NotRightTriangle~⋫~NotRightTriangleBar~⧐̸~NotRightTriangleEqual~⋭~NotSquareSubset~⊏̸~NotSquareSubsetEqual~⋢~NotSquareSuperset~⊐̸~NotSquareSupersetEqual~⋣~NotSubset~⊂⃒~NotSubsetEqual~⊈~NotSucceeds~⊁~NotSucceedsEqual~⪰̸~NotSucceedsSlantEqual~⋡~NotSucceedsTilde~≿̸~NotSuperset~⊃⃒~NotSupersetEqual~⊉~NotTilde~≁~NotTildeEqual~≄~NotTildeFullEqual~≇~NotTildeTilde~≉~NotVerticalBar~∤~Nscr~𝒩~Ocy~О~Odblac~Ő~Ofr~𝔒~Omacr~Ō~Oopf~𝕆~OpenCurlyDoubleQuote~“~OpenCurlyQuote~‘~Or~⩔~Oscr~𝒪~Otimes~⨷~OverBar~‾~OverBrace~⏞~OverBracket~⎴~OverParenthesis~⏜~PartialD~∂~Pcy~П~Pfr~𝔓~PlusMinus~±~Poincareplane~ℌ~Popf~ℙ~Pr~⪻~Precedes~≺~PrecedesEqual~⪯~PrecedesSlantEqual~≼~PrecedesTilde~≾~Product~∏~Proportion~∷~Proportional~∝~Pscr~𝒫~Qfr~𝔔~Qopf~ℚ~Qscr~𝒬~RBarr~⤐~Racute~Ŕ~Rang~⟫~Rarr~↠~Rarrtl~⤖~Rcaron~Ř~Rcedil~Ŗ~Rcy~Р~Re~ℜ~ReverseElement~∋~ReverseEquilibrium~⇋~ReverseUpEquilibrium~⥯~Rfr~ℜ~RightAngleBracket~⟩~RightArrow~→~RightArrowBar~⇥~RightArrowLeftArrow~⇄~RightCeiling~⌉~RightDoubleBracket~⟧~RightDownTeeVector~⥝~RightDownVector~⇂~RightDownVectorBar~⥕~RightFloor~⌋~RightTee~⊢~RightTeeArrow~↦~RightTeeVector~⥛~RightTriangle~⊳~RightTriangleBar~⧐~RightTriangleEqual~⊵~RightUpDownVector~⥏~RightUpTeeVector~⥜~RightUpVector~↾~RightUpVectorBar~⥔~RightVector~⇀~RightVectorBar~⥓~Rightarrow~⇒~Ropf~ℝ~RoundImplies~⥰~Rrightarrow~⇛~Rscr~ℛ~Rsh~↱~RuleDelayed~⧴~SHCHcy~Щ~SHcy~Ш~SOFTcy~Ь~Sacute~Ś~Sc~⪼~Scedil~Ş~Scirc~Ŝ~Scy~С~Sfr~𝔖~ShortDownArrow~↓~ShortLeftArrow~←~ShortRightArrow~→~ShortUpArrow~↑~SmallCircle~∘~Sopf~𝕊~Sqrt~√~Square~□~SquareIntersection~⊓~SquareSubset~⊏~SquareSubsetEqual~⊑~SquareSuperset~⊐~SquareSupersetEqual~⊒~SquareUnion~⊔~Sscr~𝒮~Star~⋆~Sub~⋐~Subset~⋐~SubsetEqual~⊆~Succeeds~≻~SucceedsEqual~⪰~SucceedsSlantEqual~≽~SucceedsTilde~≿~SuchThat~∋~Sum~∑~Sup~⋑~Superset~⊃~SupersetEqual~⊇~Supset~⋑~TRADE~™~TSHcy~Ћ~TScy~Ц~Tab~\t~Tcaron~Ť~Tcedil~Ţ~Tcy~Т~Tfr~𝔗~Therefore~∴~ThickSpace~  ~ThinSpace~ ~Tilde~∼~TildeEqual~≃~TildeFullEqual~≅~TildeTilde~≈~Topf~𝕋~TripleDot~⃛~Tscr~𝒯~Tstrok~Ŧ~Uarr~↟~Uarrocir~⥉~Ubrcy~Ў~Ubreve~Ŭ~Ucy~У~Udblac~Ű~Ufr~𝔘~Umacr~Ū~UnderBar~_~UnderBrace~⏟~UnderBracket~⎵~UnderParenthesis~⏝~Union~⋃~UnionPlus~⊎~Uogon~Ų~Uopf~𝕌~UpArrow~↑~UpArrowBar~⤒~UpArrowDownArrow~⇅~UpDownArrow~↕~UpEquilibrium~⥮~UpTee~⊥~UpTeeArrow~↥~Uparrow~⇑~Updownarrow~⇕~UpperLeftArrow~↖~UpperRightArrow~↗~Upsi~ϒ~Uring~Ů~Uscr~𝒰~Utilde~Ũ~VDash~⊫~Vbar~⫫~Vcy~В~Vdash~⊩~Vdashl~⫦~Vee~⋁~Verbar~‖~Vert~‖~VerticalBar~∣~VerticalLine~|~VerticalSeparator~❘~VerticalTilde~≀~VeryThinSpace~ 
~Vfr~𝔙~Vopf~𝕍~Vscr~𝒱~Vvdash~⊪~Wcirc~Ŵ~Wedge~⋀~Wfr~𝔚~Wopf~𝕎~Wscr~𝒲~Xfr~𝔛~Xopf~𝕏~Xscr~𝒳~YAcy~Я~YIcy~Ї~YUcy~Ю~Ycirc~Ŷ~Ycy~Ы~Yfr~𝔜~Yopf~𝕐~Yscr~𝒴~ZHcy~Ж~Zacute~Ź~Zcaron~Ž~Zcy~З~Zdot~Ż~ZeroWidthSpace~​~Zfr~ℨ~Zopf~ℤ~Zscr~𝒵~abreve~ă~ac~∾~acE~∾̳~acd~∿~acy~а~af~⁡~afr~𝔞~aleph~ℵ~amacr~ā~amalg~⨿~andand~⩕~andd~⩜~andslope~⩘~andv~⩚~ange~⦤~angle~∠~angmsd~∡~angmsdaa~⦨~angmsdab~⦩~angmsdac~⦪~angmsdad~⦫~angmsdae~⦬~angmsdaf~⦭~angmsdag~⦮~angmsdah~⦯~angrt~∟~angrtvb~⊾~angrtvbd~⦝~angsph~∢~angst~Å~angzarr~⍼~aogon~ą~aopf~𝕒~ap~≈~apE~⩰~apacir~⩯~ape~≊~apid~≋~approx~≈~approxeq~≊~ascr~𝒶~ast~*~asympeq~≍~awconint~∳~awint~⨑~bNot~⫭~backcong~≌~backepsilon~϶~backprime~‵~backsim~∽~backsimeq~⋍~barvee~⊽~barwed~⌅~barwedge~⌅~bbrk~⎵~bbrktbrk~⎶~bcong~≌~bcy~б~becaus~∵~because~∵~bemptyv~⦰~bepsi~϶~bernou~ℬ~beth~ℶ~between~≬~bfr~𝔟~bigcap~⋂~bigcirc~◯~bigcup~⋃~bigodot~⨀~bigoplus~⨁~bigotimes~⨂~bigsqcup~⨆~bigstar~★~bigtriangledown~▽~bigtriangleup~△~biguplus~⨄~bigvee~⋁~bigwedge~⋀~bkarow~⤍~blacklozenge~⧫~blacksquare~▪~blacktriangle~▴~blacktriangledown~▾~blacktriangleleft~◂~blacktriangleright~▸~blank~␣~blk12~▒~blk14~░~blk34~▓~block~█~bne~=⃥~bnequiv~≡⃥~bnot~⌐~bopf~𝕓~bot~⊥~bottom~⊥~bowtie~⋈~boxDL~╗~boxDR~╔~boxDl~╖~boxDr~╓~boxH~═~boxHD~╦~boxHU~╩~boxHd~╤~boxHu~╧~boxUL~╝~boxUR~╚~boxUl~╜~boxUr~╙~boxV~║~boxVH~╬~boxVL~╣~boxVR~╠~boxVh~╫~boxVl~╢~boxVr~╟~boxbox~⧉~boxdL~╕~boxdR~╒~boxdl~┐~boxdr~┌~boxh~─~boxhD~╥~boxhU~╨~boxhd~┬~boxhu~┴~boxminus~⊟~boxplus~⊞~boxtimes~⊠~boxuL~╛~boxuR~╘~boxul~┘~boxur~└~boxv~│~boxvH~╪~boxvL~╡~boxvR~╞~boxvh~┼~boxvl~┤~boxvr~├~bprime~‵~breve~˘~bscr~𝒷~bsemi~⁏~bsim~∽~bsime~⋍~bsol~\\~bsolb~⧅~bsolhsub~⟈~bullet~•~bump~≎~bumpE~⪮~bumpe~≏~bumpeq~≏~cacute~ć~capand~⩄~capbrcup~⩉~capcap~⩋~capcup~⩇~capdot~⩀~caps~∩︀~caret~⁁~caron~ˇ~ccaps~⩍~ccaron~č~ccirc~ĉ~ccups~⩌~ccupssm~⩐~cdot~ċ~cemptyv~⦲~centerdot~·~cfr~𝔠~chcy~ч~check~✓~checkmark~✓~cir~○~cirE~⧃~circeq~≗~circlearrowleft~↺~circlearrowright~↻~circledR~®~circledS~Ⓢ~circledast~⊛~circledcirc~⊚~circleddash~⊝~cire~≗~cirfnint~⨐~cirmid~⫯~cirscir~⧂~clubsuit~♣~colon~:~colone~≔~coloneq~≔~comma~,~commat~@~comp~∁~compfn~∘~complement~∁~complexes~ℂ~congdot~⩭~conint~∮~copf~𝕔~coprod~∐~copysr~℗~cross~✗~cscr~𝒸~csub~⫏~csube~⫑~csup~⫐~csupe~⫒~ctdot~⋯~cudarrl~⤸~cudarrr~⤵~cuepr~⋞~cuesc~⋟~cularr~↶~cularrp~⤽~cupbrcap~⩈~cupcap~⩆~cupcup~⩊~cupdot~⊍~cupor~⩅~cups~∪︀~curarr~↷~curarrm~⤼~curlyeqprec~⋞~curlyeqsucc~⋟~curlyvee~⋎~curlywedge~⋏~curvearrowleft~↶~curvearrowright~↷~cuvee~⋎~cuwed~⋏~cwconint~∲~cwint~∱~cylcty~⌭~dHar~⥥~daleth~ℸ~dash~‐~dashv~⊣~dbkarow~⤏~dblac~˝~dcaron~ď~dcy~д~dd~ⅆ~ddagger~‡~ddarr~⇊~ddotseq~⩷~demptyv~⦱~dfisht~⥿~dfr~𝔡~dharl~⇃~dharr~⇂~diam~⋄~diamond~⋄~diamondsuit~♦~die~¨~digamma~ϝ~disin~⋲~div~÷~divideontimes~⋇~divonx~⋇~djcy~ђ~dlcorn~⌞~dlcrop~⌍~dollar~$~dopf~𝕕~dot~˙~doteq~≐~doteqdot~≑~dotminus~∸~dotplus~∔~dotsquare~⊡~doublebarwedge~⌆~downarrow~↓~downdownarrows~⇊~downharpoonleft~⇃~downharpoonright~⇂~drbkarow~⤐~drcorn~⌟~drcrop~⌌~dscr~𝒹~dscy~ѕ~dsol~⧶~dstrok~đ~dtdot~⋱~dtri~▿~dtrif~▾~duarr~⇵~duhar~⥯~dwangle~⦦~dzcy~џ~dzigrarr~⟿~eDDot~⩷~eDot~≑~easter~⩮~ecaron~ě~ecir~≖~ecolon~≕~ecy~э~edot~ė~ee~ⅇ~efDot~≒~efr~𝔢~eg~⪚~egs~⪖~egsdot~⪘~el~⪙~elinters~⏧~ell~ℓ~els~⪕~elsdot~⪗~emacr~ē~emptyset~∅~emptyv~∅~emsp13~ ~emsp14~ 
~eng~ŋ~eogon~ę~eopf~𝕖~epar~⋕~eparsl~⧣~eplus~⩱~epsi~ε~epsiv~ϵ~eqcirc~≖~eqcolon~≕~eqsim~≂~eqslantgtr~⪖~eqslantless~⪕~equals~=~equest~≟~equivDD~⩸~eqvparsl~⧥~erDot~≓~erarr~⥱~escr~ℯ~esdot~≐~esim~≂~excl~!~expectation~ℰ~exponentiale~ⅇ~fallingdotseq~≒~fcy~ф~female~♀~ffilig~ffi~fflig~ff~ffllig~ffl~ffr~𝔣~filig~fi~fjlig~fj~flat~♭~fllig~fl~fltns~▱~fopf~𝕗~fork~⋔~forkv~⫙~fpartint~⨍~frac13~⅓~frac15~⅕~frac16~⅙~frac18~⅛~frac23~⅔~frac25~⅖~frac35~⅗~frac38~⅜~frac45~⅘~frac56~⅚~frac58~⅝~frac78~⅞~frown~⌢~fscr~𝒻~gE~≧~gEl~⪌~gacute~ǵ~gammad~ϝ~gap~⪆~gbreve~ğ~gcirc~ĝ~gcy~г~gdot~ġ~gel~⋛~geq~≥~geqq~≧~geqslant~⩾~ges~⩾~gescc~⪩~gesdot~⪀~gesdoto~⪂~gesdotol~⪄~gesl~⋛︀~gesles~⪔~gfr~𝔤~gg~≫~ggg~⋙~gimel~ℷ~gjcy~ѓ~gl~≷~glE~⪒~gla~⪥~glj~⪤~gnE~≩~gnap~⪊~gnapprox~⪊~gne~⪈~gneq~⪈~gneqq~≩~gnsim~⋧~gopf~𝕘~grave~`~gscr~ℊ~gsim~≳~gsime~⪎~gsiml~⪐~gtcc~⪧~gtcir~⩺~gtdot~⋗~gtlPar~⦕~gtquest~⩼~gtrapprox~⪆~gtrarr~⥸~gtrdot~⋗~gtreqless~⋛~gtreqqless~⪌~gtrless~≷~gtrsim~≳~gvertneqq~≩︀~gvnE~≩︀~hairsp~ ~half~½~hamilt~ℋ~hardcy~ъ~harrcir~⥈~harrw~↭~hbar~ℏ~hcirc~ĥ~heartsuit~♥~hercon~⊹~hfr~𝔥~hksearow~⤥~hkswarow~⤦~hoarr~⇿~homtht~∻~hookleftarrow~↩~hookrightarrow~↪~hopf~𝕙~horbar~―~hscr~𝒽~hslash~ℏ~hstrok~ħ~hybull~⁃~hyphen~‐~ic~⁣~icy~и~iecy~е~iff~⇔~ifr~𝔦~ii~ⅈ~iiiint~⨌~iiint~∭~iinfin~⧜~iiota~℩~ijlig~ij~imacr~ī~imagline~ℐ~imagpart~ℑ~imath~ı~imof~⊷~imped~Ƶ~in~∈~incare~℅~infintie~⧝~inodot~ı~intcal~⊺~integers~ℤ~intercal~⊺~intlarhk~⨗~intprod~⨼~iocy~ё~iogon~į~iopf~𝕚~iprod~⨼~iscr~𝒾~isinE~⋹~isindot~⋵~isins~⋴~isinsv~⋳~isinv~∈~it~⁢~itilde~ĩ~iukcy~і~jcirc~ĵ~jcy~й~jfr~𝔧~jmath~ȷ~jopf~𝕛~jscr~𝒿~jsercy~ј~jukcy~є~kappav~ϰ~kcedil~ķ~kcy~к~kfr~𝔨~kgreen~ĸ~khcy~х~kjcy~ќ~kopf~𝕜~kscr~𝓀~lAarr~⇚~lAtail~⤛~lBarr~⤎~lE~≦~lEg~⪋~lHar~⥢~lacute~ĺ~laemptyv~⦴~lagran~ℒ~langd~⦑~langle~⟨~lap~⪅~larrb~⇤~larrbfs~⤟~larrfs~⤝~larrhk~↩~larrlp~↫~larrpl~⤹~larrsim~⥳~larrtl~↢~lat~⪫~latail~⤙~late~⪭~lates~⪭︀~lbarr~⤌~lbbrk~❲~lbrace~{~lbrack~[~lbrke~⦋~lbrksld~⦏~lbrkslu~⦍~lcaron~ľ~lcedil~ļ~lcub~{~lcy~л~ldca~⤶~ldquor~„~ldrdhar~⥧~ldrushar~⥋~ldsh~↲~leftarrow~←~leftarrowtail~↢~leftharpoondown~↽~leftharpoonup~↼~leftleftarrows~⇇~leftrightarrow~↔~leftrightarrows~⇆~leftrightharpoons~⇋~leftrightsquigarrow~↭~leftthreetimes~⋋~leg~⋚~leq~≤~leqq~≦~leqslant~⩽~les~⩽~lescc~⪨~lesdot~⩿~lesdoto~⪁~lesdotor~⪃~lesg~⋚︀~lesges~⪓~lessapprox~⪅~lessdot~⋖~lesseqgtr~⋚~lesseqqgtr~⪋~lessgtr~≶~lesssim~≲~lfisht~⥼~lfr~𝔩~lg~≶~lgE~⪑~lhard~↽~lharu~↼~lharul~⥪~lhblk~▄~ljcy~љ~ll~≪~llarr~⇇~llcorner~⌞~llhard~⥫~lltri~◺~lmidot~ŀ~lmoust~⎰~lmoustache~⎰~lnE~≨~lnap~⪉~lnapprox~⪉~lne~⪇~lneq~⪇~lneqq~≨~lnsim~⋦~loang~⟬~loarr~⇽~lobrk~⟦~longleftarrow~⟵~longleftrightarrow~⟷~longmapsto~⟼~longrightarrow~⟶~looparrowleft~↫~looparrowright~↬~lopar~⦅~lopf~𝕝~loplus~⨭~lotimes~⨴~lowbar~_~lozenge~◊~lozf~⧫~lpar~(~lparlt~⦓~lrarr~⇆~lrcorner~⌟~lrhar~⇋~lrhard~⥭~lrtri~⊿~lscr~𝓁~lsh~↰~lsim~≲~lsime~⪍~lsimg~⪏~lsqb~[~lsquor~‚~lstrok~ł~ltcc~⪦~ltcir~⩹~ltdot~⋖~lthree~⋋~ltimes~⋉~ltlarr~⥶~ltquest~⩻~ltrPar~⦖~ltri~◃~ltrie~⊴~ltrif~◂~lurdshar~⥊~luruhar~⥦~lvertneqq~≨︀~lvnE~≨︀~mDDot~∺~male~♂~malt~✠~maltese~✠~map~↦~mapsto~↦~mapstodown~↧~mapstoleft~↤~mapstoup~↥~marker~▮~mcomma~⨩~mcy~м~measuredangle~∡~mfr~𝔪~mho~℧~mid~∣~midast~*~midcir~⫰~minusb~⊟~minusd~∸~minusdu~⨪~mlcp~⫛~mldr~…~mnplus~∓~models~⊧~mopf~𝕞~mp~∓~mscr~𝓂~mstpos~∾~multimap~⊸~mumap~⊸~nGg~⋙̸~nGt~≫⃒~nGtv~≫̸~nLeftarrow~⇍~nLeftrightarrow~⇎~nLl~⋘̸~nLt~≪⃒~nLtv~≪̸~nRightarrow~⇏~nVDash~⊯~nVdash~⊮~nacute~ń~nang~∠⃒~nap~≉~napE~⩰̸~napid~≋̸~napos~ʼn~napprox~≉~natur~♮~natural~♮~naturals~ℕ~nbump~≎̸~nbumpe~≏̸~ncap~⩃~ncaron~ň~ncedil~ņ~ncong~≇~ncongdot~⩭̸~ncup~⩂~ncy~н~neArr~⇗~nearhk~⤤~nearr~↗~nearrow~↗~nedot~≐̸~nequiv~≢~nesear~⤨~nesim~≂̸~nexist~∄~nexists~∄~nfr~𝔫~ngE~≧̸~nge~≱
~ngeq~≱~ngeqq~≧̸~ngeqslant~⩾̸~nges~⩾̸~ngsim~≵~ngt~≯~ngtr~≯~nhArr~⇎~nharr~↮~nhpar~⫲~nis~⋼~nisd~⋺~niv~∋~njcy~њ~nlArr~⇍~nlE~≦̸~nlarr~↚~nldr~‥~nle~≰~nleftarrow~↚~nleftrightarrow~↮~nleq~≰~nleqq~≦̸~nleqslant~⩽̸~nles~⩽̸~nless~≮~nlsim~≴~nlt~≮~nltri~⋪~nltrie~⋬~nmid~∤~nopf~𝕟~notinE~⋹̸~notindot~⋵̸~notinva~∉~notinvb~⋷~notinvc~⋶~notni~∌~notniva~∌~notnivb~⋾~notnivc~⋽~npar~∦~nparallel~∦~nparsl~⫽⃥~npart~∂̸~npolint~⨔~npr~⊀~nprcue~⋠~npre~⪯̸~nprec~⊀~npreceq~⪯̸~nrArr~⇏~nrarr~↛~nrarrc~⤳̸~nrarrw~↝̸~nrightarrow~↛~nrtri~⋫~nrtrie~⋭~nsc~⊁~nsccue~⋡~nsce~⪰̸~nscr~𝓃~nshortmid~∤~nshortparallel~∦~nsim~≁~nsime~≄~nsimeq~≄~nsmid~∤~nspar~∦~nsqsube~⋢~nsqsupe~⋣~nsubE~⫅̸~nsube~⊈~nsubset~⊂⃒~nsubseteq~⊈~nsubseteqq~⫅̸~nsucc~⊁~nsucceq~⪰̸~nsup~⊅~nsupE~⫆̸~nsupe~⊉~nsupset~⊃⃒~nsupseteq~⊉~nsupseteqq~⫆̸~ntgl~≹~ntlg~≸~ntriangleleft~⋪~ntrianglelefteq~⋬~ntriangleright~⋫~ntrianglerighteq~⋭~num~#~numero~№~numsp~ ~nvDash~⊭~nvHarr~⤄~nvap~≍⃒~nvdash~⊬~nvge~≥⃒~nvgt~>⃒~nvinfin~⧞~nvlArr~⤂~nvle~≤⃒~nvlt~<⃒~nvltrie~⊴⃒~nvrArr~⤃~nvrtrie~⊵⃒~nvsim~∼⃒~nwArr~⇖~nwarhk~⤣~nwarr~↖~nwarrow~↖~nwnear~⤧~oS~Ⓢ~oast~⊛~ocir~⊚~ocy~о~odash~⊝~odblac~ő~odiv~⨸~odot~⊙~odsold~⦼~ofcir~⦿~ofr~𝔬~ogon~˛~ogt~⧁~ohbar~⦵~ohm~Ω~oint~∮~olarr~↺~olcir~⦾~olcross~⦻~olt~⧀~omacr~ō~omid~⦶~ominus~⊖~oopf~𝕠~opar~⦷~operp~⦹~orarr~↻~ord~⩝~order~ℴ~orderof~ℴ~origof~⊶~oror~⩖~orslope~⩗~orv~⩛~oscr~ℴ~osol~⊘~otimesas~⨶~ovbar~⌽~par~∥~parallel~∥~parsim~⫳~parsl~⫽~pcy~п~percnt~%~period~.~pertenk~‱~pfr~𝔭~phiv~ϕ~phmmat~ℳ~phone~☎~pitchfork~⋔~planck~ℏ~planckh~ℎ~plankv~ℏ~plus~+~plusacir~⨣~plusb~⊞~pluscir~⨢~plusdo~∔~plusdu~⨥~pluse~⩲~plussim~⨦~plustwo~⨧~pm~±~pointint~⨕~popf~𝕡~pr~≺~prE~⪳~prap~⪷~prcue~≼~pre~⪯~prec~≺~precapprox~⪷~preccurlyeq~≼~preceq~⪯~precnapprox~⪹~precneqq~⪵~precnsim~⋨~precsim~≾~primes~ℙ~prnE~⪵~prnap~⪹~prnsim~⋨~profalar~⌮~profline~⌒~profsurf~⌓~propto~∝~prsim~≾~prurel~⊰~pscr~𝓅~puncsp~ 
~qfr~𝔮~qint~⨌~qopf~𝕢~qprime~⁗~qscr~𝓆~quaternions~ℍ~quatint~⨖~quest~?~questeq~≟~rAarr~⇛~rAtail~⤜~rBarr~⤏~rHar~⥤~race~∽̱~racute~ŕ~raemptyv~⦳~rangd~⦒~range~⦥~rangle~⟩~rarrap~⥵~rarrb~⇥~rarrbfs~⤠~rarrc~⤳~rarrfs~⤞~rarrhk~↪~rarrlp~↬~rarrpl~⥅~rarrsim~⥴~rarrtl~↣~rarrw~↝~ratail~⤚~ratio~∶~rationals~ℚ~rbarr~⤍~rbbrk~❳~rbrace~}~rbrack~]~rbrke~⦌~rbrksld~⦎~rbrkslu~⦐~rcaron~ř~rcedil~ŗ~rcub~}~rcy~р~rdca~⤷~rdldhar~⥩~rdquor~”~rdsh~↳~realine~ℛ~realpart~ℜ~reals~ℝ~rect~▭~rfisht~⥽~rfr~𝔯~rhard~⇁~rharu~⇀~rharul~⥬~rhov~ϱ~rightarrow~→~rightarrowtail~↣~rightharpoondown~⇁~rightharpoonup~⇀~rightleftarrows~⇄~rightleftharpoons~⇌~rightrightarrows~⇉~rightsquigarrow~↝~rightthreetimes~⋌~ring~˚~risingdotseq~≓~rlarr~⇄~rlhar~⇌~rmoust~⎱~rmoustache~⎱~rnmid~⫮~roang~⟭~roarr~⇾~robrk~⟧~ropar~⦆~ropf~𝕣~roplus~⨮~rotimes~⨵~rpar~)~rpargt~⦔~rppolint~⨒~rrarr~⇉~rscr~𝓇~rsh~↱~rsqb~]~rsquor~’~rthree~⋌~rtimes~⋊~rtri~▹~rtrie~⊵~rtrif~▸~rtriltri~⧎~ruluhar~⥨~rx~℞~sacute~ś~sc~≻~scE~⪴~scap~⪸~sccue~≽~sce~⪰~scedil~ş~scirc~ŝ~scnE~⪶~scnap~⪺~scnsim~⋩~scpolint~⨓~scsim~≿~scy~с~sdotb~⊡~sdote~⩦~seArr~⇘~searhk~⤥~searr~↘~searrow~↘~semi~;~seswar~⤩~setminus~∖~setmn~∖~sext~✶~sfr~𝔰~sfrown~⌢~sharp~♯~shchcy~щ~shcy~ш~shortmid~∣~shortparallel~∥~sigmav~ς~simdot~⩪~sime~≃~simeq~≃~simg~⪞~simgE~⪠~siml~⪝~simlE~⪟~simne~≆~simplus~⨤~simrarr~⥲~slarr~←~smallsetminus~∖~smashp~⨳~smeparsl~⧤~smid~∣~smile~⌣~smt~⪪~smte~⪬~smtes~⪬︀~softcy~ь~sol~/~solb~⧄~solbar~⌿~sopf~𝕤~spadesuit~♠~spar~∥~sqcap~⊓~sqcaps~⊓︀~sqcup~⊔~sqcups~⊔︀~sqsub~⊏~sqsube~⊑~sqsubset~⊏~sqsubseteq~⊑~sqsup~⊐~sqsupe~⊒~sqsupset~⊐~sqsupseteq~⊒~squ~□~square~□~squarf~▪~squf~▪~srarr~→~sscr~𝓈~ssetmn~∖~ssmile~⌣~sstarf~⋆~star~☆~starf~★~straightepsilon~ϵ~straightphi~ϕ~strns~¯~subE~⫅~subdot~⪽~subedot~⫃~submult~⫁~subnE~⫋~subne~⊊~subplus~⪿~subrarr~⥹~subset~⊂~subseteq~⊆~subseteqq~⫅~subsetneq~⊊~subsetneqq~⫋~subsim~⫇~subsub~⫕~subsup~⫓~succ~≻~succapprox~⪸~succcurlyeq~≽~succeq~⪰~succnapprox~⪺~succneqq~⪶~succnsim~⋩~succsim~≿~sung~♪~supE~⫆~supdot~⪾~supdsub~⫘~supedot~⫄~suphsol~⟉~suphsub~⫗~suplarr~⥻~supmult~⫂~supnE~⫌~supne~⊋~supplus~⫀~supset~⊃~supseteq~⊇~supseteqq~⫆~supsetneq~⊋~supsetneqq~⫌~supsim~⫈~supsub~⫔~supsup~⫖~swArr~⇙~swarhk~⤦~swarr~↙~swarrow~↙~swnwar~⤪~target~⌖~tbrk~⎴~tcaron~ť~tcedil~ţ~tcy~т~tdot~⃛~telrec~⌕~tfr~𝔱~therefore~∴~thetav~ϑ~thickapprox~≈~thicksim~∼~thkap~≈~thksim~∼~timesb~⊠~timesbar~⨱~timesd~⨰~tint~∭~toea~⤨~top~⊤~topbot~⌶~topcir~⫱~topf~𝕥~topfork~⫚~tosa~⤩~tprime~‴~triangle~▵~triangledown~▿~triangleleft~◃~trianglelefteq~⊴~triangleq~≜~triangleright~▹~trianglerighteq~⊵~tridot~◬~trie~≜~triminus~⨺~triplus~⨹~trisb~⧍~tritime~⨻~trpezium~⏢~tscr~𝓉~tscy~ц~tshcy~ћ~tstrok~ŧ~twixt~≬~twoheadleftarrow~↞~twoheadrightarrow~↠~uHar~⥣~ubrcy~ў~ubreve~ŭ~ucy~у~udarr~⇅~udblac~ű~udhar~⥮~ufisht~⥾~ufr~𝔲~uharl~↿~uharr~↾~uhblk~▀~ulcorn~⌜~ulcorner~⌜~ulcrop~⌏~ultri~◸~umacr~ū~uogon~ų~uopf~𝕦~uparrow~↑~updownarrow~↕~upharpoonleft~↿~upharpoonright~↾~uplus~⊎~upsi~υ~upuparrows~⇈~urcorn~⌝~urcorner~⌝~urcrop~⌎~uring~ů~urtri~◹~uscr~𝓊~utdot~⋰~utilde~ũ~utri~▵~utrif~▴~uuarr~⇈~uwangle~⦧~vArr~⇕~vBar~⫨~vBarv~⫩~vDash~⊨~vangrt~⦜~varepsilon~ϵ~varkappa~ϰ~varnothing~∅~varphi~ϕ~varpi~ϖ~varpropto~∝~varr~↕~varrho~ϱ~varsigma~ς~varsubsetneq~⊊︀~varsubsetneqq~⫋︀~varsupsetneq~⊋︀~varsupsetneqq~⫌︀~vartheta~ϑ~vartriangleleft~⊲~vartriangleright~⊳~vcy~в~vdash~⊢~vee~∨~veebar~⊻~veeeq~≚~vellip~⋮~verbar~|~vert~|~vfr~𝔳~vltri~⊲~vnsub~⊂⃒~vnsup~⊃⃒~vopf~𝕧~vprop~∝~vrtri~⊳~vscr~𝓋~vsubnE~⫋︀~vsubne~⊊︀~vsupnE~⫌︀~vsupne~⊋︀~vzigzag~⦚~wcirc~ŵ~wedbar~⩟~wedge~∧~wedgeq~≙~wfr~𝔴~wopf~𝕨~wp~℘~wr~≀~wreath~≀~wscr~𝓌~xcap~⋂~xcirc~◯~xcup~⋃~xdtri~▽~xfr~𝔵~xhArr~⟺~xharr~⟷~xlArr~⟸~xlarr~⟵~xmap~⟼~xnis~⋻~xodot~⨀~xopf~𝕩~xoplus~⨁~xot
ime~⨂~xrArr~⟹~xrarr~⟶~xscr~𝓍~xsqcup~⨆~xuplus~⨄~xutri~△~xvee~⋁~xwedge~⋀~yacy~я~ycirc~ŷ~ycy~ы~yfr~𝔶~yicy~ї~yopf~𝕪~yscr~𝓎~yucy~ю~zacute~ź~zcaron~ž~zcy~з~zdot~ż~zeetrf~ℨ~zfr~𝔷~zhcy~ж~zigrarr~⇝~zopf~𝕫~zscr~𝓏~~AMP~&~COPY~©~GT~>~LT~<~QUOT~\"~REG~®", exports.namedReferences['html4']); +//# sourceMappingURL=named-references.js.map + +/***/ }), + +/***/ 1057: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.numericUnicodeMap = void 0; +exports.numericUnicodeMap = { + 0: 65533, + 128: 8364, + 130: 8218, + 131: 402, + 132: 8222, + 133: 8230, + 134: 8224, + 135: 8225, + 136: 710, + 137: 8240, + 138: 352, + 139: 8249, + 140: 338, + 142: 381, + 145: 8216, + 146: 8217, + 147: 8220, + 148: 8221, + 149: 8226, + 150: 8211, + 151: 8212, + 152: 732, + 153: 8482, + 154: 353, + 155: 8250, + 156: 339, + 158: 382, + 159: 376 +}; +//# sourceMappingURL=numeric-unicode-map.js.map + +/***/ }), + +/***/ 9718: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.highSurrogateTo = exports.highSurrogateFrom = exports.getCodePoint = exports.fromCodePoint = void 0; +exports.fromCodePoint = String.fromCodePoint || + function (astralCodePoint) { + return String.fromCharCode(Math.floor((astralCodePoint - 0x10000) / 0x400) + 0xd800, ((astralCodePoint - 0x10000) % 0x400) + 0xdc00); + }; +// @ts-expect-error - String.prototype.codePointAt might not exist in older node versions +exports.getCodePoint = String.prototype.codePointAt + ? function (input, position) { + return input.codePointAt(position); + } + : function (input, position) { + return (input.charCodeAt(position) - 0xd800) * 0x400 + input.charCodeAt(position + 1) - 0xdc00 + 0x10000; + }; +exports.highSurrogateFrom = 0xd800; +exports.highSurrogateTo = 0xdbff; +//# sourceMappingURL=surrogate-pairs.js.map + +/***/ }), + +/***/ 1969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* eslint-disable node/no-missing-require */ + +function getPackageJSON() { + return __nccwpck_require__(2721); +} + +exports.getPackageJSON = getPackageJSON; + + /***/ }), /***/ 4012: @@ -65404,6 +107257,30 @@ module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4. 
/***/ }), +/***/ 2721: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.16.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- --sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha 
build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"<4.1.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","duplexify":"^4.1.3","fast-xml-parser":"^4.4.1","gaxios":"^6.0.2","google-auth-library":"^9.6.3","html-entities":"^2.5.2","mime":"^3.0.0","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/duplexify":"^3.6.4","@types/mime":"^3.0.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^22.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^9.0.0","form-data":"^4.0.0","gapic-tools":"^0.4.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^3.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.5.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^18.0.0","nise":"6.0.0","path-to-regexp":"6.3.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); + +/***/ }), + +/***/ 6495: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"gaxios","version":"6.7.1","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"1.1.19","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^10.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cheerio":"1.0.0-rc.10","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","linkinator":"^3.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^19.0.0","sinon":"^18.0.0","stream-browserify":"^3.0.0","tmp":"0.2.3","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9","uuid":"^9.0.1"}}'); + +/***/ }), + +/***/ 6066: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"google-auth-library","version":"9.15.1","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^17.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","cheerio":"1.0.0-rc.12","codecov":"^3.0.2","engine.io":"6.6.2","gts":"^5.0.0","is-docker":"^2.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","pdfmake":"0.2.12","puppeteer":"^21.0.0","sinon":"^18.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"jsdoc -c .jsdoc.json","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); + +/***/ }), + /***/ 1813: /***/ 
((module) => { diff --git a/dist/save-only/index.js b/dist/save-only/index.js index 1e1b222..83d914a 100644 --- a/dist/save-only/index.js +++ b/dist/save-only/index.js @@ -7792,7 +7792,7 @@ exports.isTokenCredential = isTokenCredential; Object.defineProperty(exports, "__esModule", ({ value: true })); -var uuid = __nccwpck_require__(3534); +var uuid = __nccwpck_require__(2048); var util = __nccwpck_require__(9023); var tslib = __nccwpck_require__(470); var xml2js = __nccwpck_require__(758); @@ -14157,652 +14157,6 @@ var __createBinding; }); -/***/ }), - -/***/ 3534: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(3417)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(2855)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(7974)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(6165)); - -var _nil = _interopRequireDefault(__nccwpck_require__(7441)); - -var _version = _interopRequireDefault(__nccwpck_require__(5962)); - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 7370: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 7441: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 6221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ - - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 8689: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 6299: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 8821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 9651: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; -} - -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 3417: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 2855: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _md = _interopRequireDefault(__nccwpck_require__(7370)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 8132: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; - } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 7974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - - return buf; - } - - return (0, _stringify.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 6165: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _sha = _interopRequireDefault(__nccwpck_require__(8821)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 1690: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(8689)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 5962: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); -} - -var _default = version; -exports["default"] = _default; - /***/ }), /***/ 5862: @@ -42639,6 +41993,619 @@ var __createBinding; }); +/***/ }), + +/***/ 9469: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(6251); +const extend = __nccwpck_require__(3860); +const resource_stream_1 = __nccwpck_require__(7618); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! 
Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. 
+ if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); + } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(2203); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + this._otherArgs = []; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; + } + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map + +/***/ }), + +/***/ 7635: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(2203); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. 
+ * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. 
+ if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9977: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(538); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = new Queue(); + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.size > 0) { + queue.dequeue()(); + } + }; + + const run = async (fn, resolve, ...args) => { + activeCount++; + + const result = (async () => fn(...args))(); + + resolve(result); + + try { + await result; + } catch {} + + next(); + }; + + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); + + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. + await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); + } + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); + }); + + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); + + return generator; +}; + +module.exports = pLimit; + + /***/ }), /***/ 9750: @@ -42751,7 +42718,7 @@ exports.ContextAPI = ContextAPI; */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DiagAPI = void 0; -const ComponentLogger_1 = __nccwpck_require__(7723); +const ComponentLogger_1 = __nccwpck_require__(104); const logLevelLogger_1 = __nccwpck_require__(3514); const types_1 = __nccwpck_require__(2573); const global_utils_1 = __nccwpck_require__(9923); @@ -43473,7 +43440,7 @@ exports.diag = diag_1.DiagAPI.instance(); /***/ }), -/***/ 7723: +/***/ 104: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -50282,6 +50249,531 @@ class ReflectionTypeCheck { exports.ReflectionTypeCheck = ReflectionTypeCheck; +/***/ }), + +/***/ 8662: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports["default"] = once; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7413: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var eventTargetShim = __nccwpck_require__(6577); + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map + + +/***/ }), + +/***/ 5183: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(8611)); +const https = __importStar(__nccwpck_require__(5692)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 8894: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const http = __importStar(__nccwpck_require__(8611)); +const https_1 = __nccwpck_require__(5692); +__exportStar(__nccwpck_require__(5183), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. 
+ if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 
'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6251: +/***/ ((module) => { + +"use strict"; + + +const arrify = value => { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +}; + +module.exports = arrify; + + +/***/ }), + +/***/ 5195: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Packages +var retrier = __nccwpck_require__(5546); + +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; + + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } + + op = retrier.operation(options); + + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) + + function bail(err) { + reject(err || new Error('Aborted')); + } + + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } + + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } + } + + function runAttempt(num) { + var val; + + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } + + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } + + op.attempt(runAttempt); + } + + return new Promise(run); +} + +module.exports = retry; + + /***/ }), /***/ 1324: @@ -50793,6 +51285,3093 @@ function range(a, b, str) { } +/***/ }), + +/***/ 8793: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray + +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} + +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 + +function getLens (b64) { + var len = b64.length + + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} + + +/***/ }), + +/***/ 1259: +/***/ (function(module) { + +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.2.1 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2025 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // The index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if ( true && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); + + /***/ }), /***/ 4691: @@ -51001,6 +54580,55 @@ function expand(str, isTop) { +/***/ }), + +/***/ 9732: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*jshint node:true */ + +var Buffer = (__nccwpck_require__(181).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(181).SlowBuffer); + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. 
+ if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; + + /***/ }), /***/ 5630: @@ -51236,6 +54864,850 @@ var isArray = Array.isArray || function (xs) { }; +/***/ }), + +/***/ 6110: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? 
http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
+ */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 897: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(744); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 2830: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. 
+ */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(6110); +} else { + module.exports = __nccwpck_require__(5108); +} + + +/***/ }), + +/***/ 5108: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(2018); +const util = __nccwpck_require__(9023); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(1450); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? 
c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + /***/ }), /***/ 2710: @@ -51350,6 +55822,15373 @@ DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { }; +/***/ }), + +/***/ 9112: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var stream = __nccwpck_require__(6131) +var eos = __nccwpck_require__(1424) +var inherits = __nccwpck_require__(9598) +var shift = __nccwpck_require__(4633) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 325: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Buffer = (__nccwpck_require__(3058).Buffer); + +var getParamBytesForAlg = __nccwpck_require__(5028); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if 
(signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; + + +/***/ }), + +/***/ 5028: +/***/ ((module) => { + +"use strict"; + + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; + + +/***/ }), + +/***/ 1424: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(5560); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 6577: +/***/ ((module, exports) => { + +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. 
+ */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. 
+ * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. 
+ for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." 
+ ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. 
+ * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. 
+ */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + + +/***/ }), + +/***/ 3860: +/***/ ((module) => { + +"use strict"; + + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. 
+ var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; + + +/***/ }), + +/***/ 9741: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(9433); +const XMLParser = __nccwpck_require__(9844); +const XMLBuilder = __nccwpck_require__(659); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 812: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} + +module.exports = getIgnoreAttributesFn + +/***/ }), + +/***/ 7019: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 9433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(7019); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested 
function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
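// Sketch of the contract of this validate function (exposed as XMLValidator by
// module 9741): it returns `true` for well-formed input, otherwise the object
// built by getErrorObject() further below. Sample inputs are illustrative:
exports.validate('<a><b/></a>'); // true
exports.validate('<a>');         // { err: { code: 'InvalidTag', msg: "Unclosed tag 'a'.", line: 1, col: 1 } }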
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs 
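// Worked example for getLineNumberForPosition() defined a few lines above; line
// and column are 1-based and derived by splitting the prefix on newlines.
// For xmlData = '<r>\n  <x/>\n</r>' and index 8 (the '/' inside '<x/>'):
//   substring(0, 8) -> '<r>\n  <x', split(/\r?\n/) -> ['<r>', '  <x']
//   so line = 2 and col = 4 + 1 = 5.
getLineNumberForPosition('<r>\n  <x/>\n</r>', 8); // { line: 2, col: 5 }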
= __nccwpck_require__(6378); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes === true || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0, []).val; + } +}; + +Builder.prototype.j2x = function(jObj, level, ajPath) { + let attrStr = ''; + let val = ''; + const jPath = ajPath.join('.') + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key === this.options.cdataPropName) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr && !this.ignoreAttributesFn(attr, jPath)) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + } else if (!attr) { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1, ajPath.concat(key)); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level, ajPath) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level, ajPath) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level, ajPath) { + const result = this.j2x(object, level + 1, ajPath.concat(key)); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
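// Rough sketch of what j2x()/build() above produce (Builder is exported as
// XMLBuilder by module 9741), assuming this module's default options plus
// ignoreAttributes: false; attribute keys carry the '@_' prefix and format
// defaults to false, so no newlines are emitted:
new XMLBuilder({ ignoreAttributes: false }).build({ note: { '@_id': 1, to: 'Tove' } });
// -> '<note id="1"><to>Tove</to></note>'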
+ this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { + +const EOL = "\n"; + +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; + + +/***/ }), + +/***/ 151: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(7019); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
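// Sketch of the effect of readDocType() above once wired into the parser in
// module 3017: internal entities declared in an inline DOCTYPE are collected
// into `entities` and substituted later by replaceEntitiesValue(). Assuming
// upstream fast-xml-parser behaviour, roughly:
new XMLParser().parse('<!DOCTYPE note [<!ENTITY writer "Tove">]><note>&writer;</note>');
// -> { note: 'Tove' }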
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; + +/***/ }), + +/***/ 3017: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +///@ts-check + +const util = __nccwpck_require__(7019); +const xmlNode = __nccwpck_require__(9307); +const readDocType = __nccwpck_require__(151); +const toNumber = __nccwpck_require__(6496); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: 
/&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag 
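// Sketch of buildAttributesMap() above with attribute parsing enabled
// (ignoreAttributes: false); allowBooleanAttributes lets value-less attributes
// through as `true`, and attributeNamePrefix defaults to '@_':
new XMLParser({ ignoreAttributes: false, allowBooleanAttributes: true })
  .parse('<input x="1" disabled/>');
// -> { input: { '@_x': '1', '@_disabled': true } }
// '@_x' stays a string because parseAttributeValue defaults to false.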
scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = 
currentNode.child.length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else 
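// Sketch of the stopNodes feature handled by isItStopNode()/readStopNodeData()
// above: the matched node's inner markup is kept as one unparsed string.
// Illustrative, assuming upstream semantics of the '*.' wildcard form:
new XMLParser({ stopNodes: ['*.raw'] }).parse('<order><raw><item id="1"/></raw></order>');
// -> { order: { raw: '<item id="1"/>' } }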
if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 9844: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(4769); +const OrderedObjParser = __nccwpck_require__(3017); +const { prettify} = __nccwpck_require__(7594); +const validator = __nccwpck_require__(9433); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 7594: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
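// Usage sketch tying XMLParser (module 9844 above) to prettify() in this module.
// Options and outputs assume upstream fast-xml-parser defaults:
const p = new XMLParser({ ignoreAttributes: false });
p.addEntity('deg', '°');                        // '&deg;' in text becomes '°'
p.parse('<temp unit="C">21&deg;</temp>', true); // `true` validates before parsing
// -> { temp: { '#text': '21°', '@_unit': 'C' } }
// With preserveOrder: true the ordered form is returned and prettify() is skipped:
new XMLParser({ preserveOrder: true }).parse('<a><b>1</b></a>');
// -> [ { a: [ { b: [ { '#text': 1 } ] } ] } ]
new XMLParser().parse('<a><b>1</b></a>');
// -> { a: { b: 1 } }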
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; + + +/***/ }), + +/***/ 9307: +/***/ ((module) => { + +"use strict"; + + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; + +/***/ }), + +/***/ 7506: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +exports.defaultErrorRedactor = defaultErrorRedactor; +const url_1 = __nccwpck_require__(7016); +const util_1 = __nccwpck_require__(3155); +const extend_1 = __importDefault(__nccwpck_require__(3860)); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. 
+ * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. + } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/i.test(key)) { + headers[key] = REDACT; + } + // any casing of `Authorization` + if (/^authorization$/i.test(key)) { + headers[key] = REDACT; + } + // anything containing secret, such as 'client secret' + if (/secret/i.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/i.test(text) || + /assertion=/i.test(text) || + /secret/i.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + if ('client_secret' in obj) { + obj['client_secret'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + if (url.searchParams.has('client_secret')) { + url.searchParams.set('client_secret', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid 
URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +//# sourceMappingURL=common.js.map + +/***/ }), + +/***/ 6010: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
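// Sketch of defaultErrorRedactor() from module 7506 above, based on the checks
// in that function: header names matching authentication/authorization/secret,
// grant_type/assertion/client_secret fields, and token/client_secret query
// parameters are replaced with the redaction marker. Values are illustrative:
const redacted = defaultErrorRedactor({
  config: {
    headers: { Authorization: 'Bearer abc', 'x-client-secret': 'shh' },
    data: 'grant_type=client_credentials',
    url: 'https://example.test/oauth?token=abc123',
  },
});
// redacted.config.headers.Authorization      -> redaction marker
// redacted.config.headers['x-client-secret'] -> redaction marker
// redacted.config.data                       -> redaction marker (matches /grant_type=/i)
// redacted.config.url                        -> token query value replaced with the marker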
mod : { "default": mod }; +}; +var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(3860)); +const https_1 = __nccwpck_require__(5692); +const node_fetch_1 = __importDefault(__nccwpck_require__(6705)); +const querystring_1 = __importDefault(__nccwpck_require__(3480)); +const is_stream_1 = __importDefault(__nccwpck_require__(6543)); +const url_1 = __nccwpck_require__(7016); +const common_1 = __nccwpck_require__(7506); +const retry_1 = __nccwpck_require__(2789); +const stream_1 = __nccwpck_require__(2203); +const uuid_1 = __nccwpck_require__(3187); +const interceptor_1 = __nccwpck_require__(5608); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + _Gaxios_instances.add(this); + this.agentCache = new Map(); + this.defaults = defaults || {}; + this.interceptors = { + request: new interceptor_1.GaxiosInterceptorManager(), + response: new interceptor_1.GaxiosInterceptorManager(), + }; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); + return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _b; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? 
void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_b = err.config) === null || _b === void 0 ? void 0 : _b.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } + /** + * Creates an async generator that yields the pieces of a multipart/related request body. + * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive + * multipart/related requests are not currently supported. + * + * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. + * @param {string} boundary the boundary string to be placed between each part. 
+ */ + async *getMultipartRequest(multipartOptions, boundary) { + const finale = `--${boundary}--`; + for (const currentPart of multipartOptions) { + const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; + const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; + yield preamble; + if (typeof currentPart.content === 'string') { + yield currentPart.content; + } + else { + yield* currentPart.content; + } + yield '\r\n'; + } + yield finale; + } +} +exports.Gaxios = Gaxios; +_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { + var _b, _c; + const candidate = new url_1.URL(url); + const noProxyList = [...noProxy]; + const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? void 0 : _c.split(',')) || []; + for (const rule of noProxyEnvList) { + noProxyList.push(rule.trim()); + } + for (const rule of noProxyList) { + // Match regex + if (rule instanceof RegExp) { + if (rule.test(candidate.toString())) { + return false; + } + } + // Match URL + else if (rule instanceof url_1.URL) { + if (rule.origin === candidate.origin) { + return false; + } + } + // Match string regex + else if (rule.startsWith('*.') || rule.startsWith('.')) { + const cleanedRule = rule.replace(/^\*\./, '.'); + if (candidate.hostname.endsWith(cleanedRule)) { + return false; + } + } + // Basic string match + else if (rule === candidate.origin || + rule === candidate.hostname || + rule === candidate.href) { + return false; + } + } + return true; +}, _Gaxios_applyRequestInterceptors = +/** + * Applies the request interceptors. The request interceptors are applied after the + * call to prepareRequest is completed. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyRequestInterceptors(options) { + let promiseChain = Promise.resolve(options); + for (const interceptor of this.interceptors.request.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_applyResponseInterceptors = +/** + * Applies the response interceptors. The response interceptors are applied after the + * call to request is made. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyResponseInterceptors(response) { + let promiseChain = Promise.resolve(response); + for (const interceptor of this.interceptors.response.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_prepareRequest = +/** + * Validates the options, merges them with defaults, and prepare request. + * + * @param options The original options passed from the client. 
+ * @returns Prepared options, ready to make a request + */ +async function _Gaxios_prepareRequest(options) { + var _b, _c, _d, _e; + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl.toString() + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.multipart === undefined && opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + else if (opts.multipart && opts.multipart.length > 0) { + // note: once the minimum version reaches Node 16, + // this can be replaced with randomUUID() function from crypto + // and the dependency on UUID removed + const boundary = (0, uuid_1.v4)(); + opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + const bodyStream = new stream_1.PassThrough(); + opts.body = bodyStream; + (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = opts.proxy || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || + ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? 
void 0 : _e.http_proxy); + const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); + if (opts.agent) { + // don't do any of the following options - use the user-provided agent. + } + else if (proxy && urlMayUseProxy) { + const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + opts.agent = new HttpsProxyAgent(proxy, { + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; +}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { + __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(3669)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); + return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); +}; +/** + * A cache for the lazily-loaded proxy agent. + * + * Should use {@link Gaxios[#getProxyAgent]} to retrieve. + */ +// using `import` to dynamically import the types here +_Gaxios_proxyAgent = { value: void 0 }; +//# sourceMappingURL=gaxios.js.map + +/***/ }), + +/***/ 7003: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +exports.request = request; +const gaxios_1 = __nccwpck_require__(6010); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(7506); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); +__exportStar(__nccwpck_require__(5608), exports); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 5608: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosInterceptorManager = void 0; +/** + * Class to manage collections of GaxiosInterceptors for both requests and responses. + */ +class GaxiosInterceptorManager extends Set { +} +exports.GaxiosInterceptorManager = GaxiosInterceptorManager; +//# sourceMappingURL=interceptor.js.map + +/***/ }), + +/***/ 2789: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = getRetryConfig; +async function getRetryConfig(err) { + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 
3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + config.retryDelayMultiplier = config.retryDelayMultiplier + ? config.retryDelayMultiplier + : 2; + config.timeOfFirstRequest = config.timeOfFirstRequest + ? config.timeOfFirstRequest + : Date.now(); + config.totalTimeout = config.totalTimeout + ? config.totalTimeout + : Number.MAX_SAFE_INTEGER; + config.maxRetryDelay = config.maxRetryDelay + ? config.maxRetryDelay + : Number.MAX_SAFE_INTEGER; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 408 - Retry ("Request Timeout") + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [408, 408], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + const delay = getNextRetryDelay(config); + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
+ if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +/** + * Gets the delay to wait before the next retry. + * + * @param {RetryConfig} config The current set of retry options + * @returns {number} the amount of ms to wait before the next retry attempt. + */ +function getNextRetryDelay(config) { + var _a; + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1 / 2) * 1000) + const calculatedDelay = retryDelay + + ((Math.pow(config.retryDelayMultiplier, config.currentRetryAttempt) - 1) / + 2) * + 1000; + const maxAllowableDelay = config.totalTimeout - (Date.now() - config.timeOfFirstRequest); + return Math.min(calculatedDelay, maxAllowableDelay, config.maxRetryDelay); +} +//# sourceMappingURL=retry.js.map + +/***/ }), + +/***/ 3155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6495); +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 3187: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6698)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6272)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(8485)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(742)); + +var _nil = _interopRequireDefault(__nccwpck_require__(8212)); + +var _version = _interopRequireDefault(__nccwpck_require__(6651)); + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4386)); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 1979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 3968: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 8212: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 9884: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 956: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 4566: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5398: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 4386: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6698: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6272: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _md = _interopRequireDefault(__nccwpck_require__(1979)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2781: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(4386); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 8485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(3968)); + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 742: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5398)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 5913: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(956)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6651: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCE_LINUX_BIOS_PATHS = void 0; +exports.isGoogleCloudServerless = isGoogleCloudServerless; +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +exports.isGoogleComputeEngine = isGoogleComputeEngine; +exports.detectGCPResidency = detectGCPResidency; +const fs_1 = __nccwpck_require__(9896); +const os_1 = __nccwpck_require__(857); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +/** + * Determines if the process is running on Google Cloud Platform. 
+ * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +//# sourceMappingURL=gcp-residency.js.map + +/***/ }), + +/***/ 3046: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.gcpResidencyCache = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +exports.instance = instance; +exports.project = project; +exports.universe = universe; +exports.bulk = bulk; +exports.isAvailable = isAvailable; +exports.resetIsAvailableCache = resetIsAvailableCache; +exports.getGCPResidency = getGCPResidency; +exports.setGCPResidency = setGCPResidency; +exports.requestTimeout = requestTimeout; +const gaxios_1 = __nccwpck_require__(7003); +const jsonBigint = __nccwpck_require__(4826); +const gcp_residency_1 = __nccwpck_require__(381); +const logger = __nccwpck_require__(1577); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +const log = logger.log('gcp metadata'); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. 
+ */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const req = { + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }; + log.info('instance request %j', req); + const res = await requestMethod(req); + log.info('instance metadata is %s', res.data); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header. Expected '${exports.HEADER_VALUE}', got ${res.headers[exports.HEADER_NAME.toLowerCase()] ? `'${res.headers[exports.HEADER_NAME.toLowerCase()]}'` : 'no header'}`); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; +} +async function fastFailMetadataRequest(options) { + var _a; + const secondaryOptions = { + ...options, + url: (_a = options.url) === null || _a === void 0 ? void 0 : _a.toString().replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. 
+ // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe-domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } +} +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; +} +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; +} +__exportStar(__nccwpck_require__(381), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(4434); +const gaxios_1 = __nccwpck_require__(7003); +const transporters_1 = __nccwpck_require__(7633); +const util_1 = __nccwpck_require__(7870); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.apiKey = opts.apiKey; + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. + * + * @expiremental + */ + get gaxios() { + if (this.transporter instanceof gaxios_1.Gaxios) { + return this.transporter; + } + else if (this.transporter instanceof transporters_1.DefaultTransporter) { + return this.transporter.instance; + } + else if ('instance' in this.transporter && + this.transporter.instance instanceof gaxios_1.Gaxios) { + return this.transporter.instance; + } + return null; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. 
+ */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. + */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.AuthClient = AuthClient; + + +/***/ }), + +/***/ 1261: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(7647); +const baseexternalclient_1 = __nccwpck_require__(142); +const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(9157); +const util_1 = __nccwpck_require__(7870); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
+ */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !awsSecurityCredentialsSupplier) { + throw new Error('A credential source or AWS security credentials supplier must be specified.'); + } + if (credentialSource && awsSecurityCredentialsSupplier) { + throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); + } + if (awsSecurityCredentialsSupplier) { + this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; + this.regionalCredVerificationUrl = + __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + this.environmentId = credentialSourceOpts.get('environment_id'); + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + const regionUrl = credentialSourceOpts.get('region_url'); + // This is only required if AWS security credentials are not available in + // environment variables. + const securityCredentialsUrl = credentialSourceOpts.get('url'); + const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); + this.awsSecurityCredentialsSupplier = + new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ + regionUrl: regionUrl, + securityCredentialsUrl: securityCredentialsUrl, + imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, + }); + this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + this.awsRequestSigner = null; + this.region = ''; + } + validateEnvironmentId() { + var _b; + const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. This will call the + * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS + * Security Credentials, then use them to create a signed AWS STS request that + * can be exchanged for a GCP access token. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. 
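For orientation, the AwsClient constructor above reads a fixed set of keys from the external account JSON. A minimal sketch of such a configuration follows; all values are illustrative placeholders rather than anything taken from this diff, except the regional_cred_verification_url, which echoes the default defined later in the bundled code.

// Hypothetical AWS external_account configuration consumed by AwsClient (sketch only).
const awsCredentialConfig = {
  type: 'external_account',
  audience: '//iam.googleapis.com/projects/000000000000/locations/global/workloadIdentityPools/example-pool/providers/example-provider', // placeholder
  subject_token_type: 'urn:ietf:params:aws:token-type:aws4_request',
  token_url: 'https://sts.googleapis.com/v1/token',
  credential_source: {
    environment_id: 'aws1', // must match /^(aws)(\d+)$/ with version 1
    region_url: 'http://169.254.169.254/latest/meta-data/placement/availability-zone',
    url: 'http://169.254.169.254/latest/meta-data/iam/security-credentials',
    regional_cred_verification_url: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15',
    imdsv2_session_token_url: 'http://169.254.169.254/latest/api/token', // optional
  },
};
// const client = new AwsClient(awsCredentialConfig);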
+ const options = await this.awsRequestSigner.getRequestOptions({ + ..._a.RETRY_CONFIG, + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } +} +exports.AwsClient = AwsClient; +_a = AwsClient; +_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; +/** + * @deprecated AWS client no longer validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +/** + * @deprecated AWS client no longer validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; + + +/***/ }), + +/***/ 7647: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(8851); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. 
+ */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. 
+ * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? 
undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} + + +/***/ }), + +/***/ 142: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _BaseExternalAccountClient_instances, _BaseExternalAccountClient_pendingAccessToken, _BaseExternalAccountClient_internalRefreshAccessTokenAsync; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +const util_1 = __nccwpck_require__(7870); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. 
+ * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(4810); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + _BaseExternalAccountClient_instances.add(this); + /** + * A pending access token request. Used for concurrent calls. + */ + _BaseExternalAccountClient_pendingAccessToken.set(this, null); + const opts = (0, util_1.originalOrCamelOptions)(options); + const type = opts.get('type'); + if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? 
_a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + this.supplierContext = { + audience: this.audience, + subjectTokenType: this.subjectTokenType, + transporter: this.transporter, + }; + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The format looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?<email>[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. 
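As a quick standalone check of the email-parsing regex in getServiceAccountEmail() above, the named `email` capture group behaves as shown in this sketch; the impersonation URL is a made-up placeholder.

const re = /serviceAccounts\/(?<email>[^:]+):generateAccessToken$/;
const impersonationUrl = 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/example-sa@example-project.iam.gserviceaccount.com:generateAccessToken'; // placeholder
console.log(re.exec(impersonationUrl)?.groups?.email);
// -> 'example-sa@example-project.iam.gserviceaccount.com'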
+ if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer <access_token>' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + ...BaseExternalAccountClient.RETRY_CONFIG, + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. 
+ // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Use an existing access token request, or cache a new one + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f") || __classPrivateFieldGet(this, _BaseExternalAccountClient_instances, "m", _BaseExternalAccountClient_internalRefreshAccessTokenAsync).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f"); + } + finally { + // clear pending access token for future requests + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, null, "f"); + } + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + ...BaseExternalAccountClient.RETRY_CONFIG, + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. 
+ * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; +_BaseExternalAccountClient_pendingAccessToken = new WeakMap(), _BaseExternalAccountClient_instances = new WeakSet(), _BaseExternalAccountClient_internalRefreshAccessTokenAsync = async function _BaseExternalAccountClient_internalRefreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
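The expiry test used by isExpired() above reduces to a single comparison. A minimal sketch with illustrative numbers follows; the 5-minute threshold mirrors DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS earlier in this diff.

const eagerRefreshThresholdMillis = 5 * 60 * 1000;
const isExpired = (token, now = Date.now()) =>
  token.expiry_date ? now >= token.expiry_date - eagerRefreshThresholdMillis : false;

console.log(isExpired({ expiry_date: Date.now() + 10 * 60 * 1000 })); // false: more than 5 minutes left
console.log(isExpired({ expiry_date: Date.now() + 60 * 1000 }));      // true: inside the eager-refresh window
console.log(isExpired({}));                                           // false: no expiry date recorded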
+ return this.cachedAccessToken; +}; + + +/***/ }), + +/***/ 977: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const gcpMetadata = __nccwpck_require__(3046); +const oauth2client_1 = __nccwpck_require__(91); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. 
This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; + + +/***/ }), + +/***/ 9157: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultAwsSecurityCredentialsSupplier = void 0; +/** + * Internal AWS security credentials supplier implementation used by {@link AwsClient} + * when a credential source is provided instead of a user defined supplier. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + */ +class DefaultAwsSecurityCredentialsSupplier { + /** + * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information + * from the credential_source stored in the ADC file. + * @param opts The default aws security credentials supplier options object to + * build the supplier with. 
+ */ + constructor(opts) { + _DefaultAwsSecurityCredentialsSupplier_instances.add(this); + this.regionUrl = opts.regionUrl; + this.securityCredentialsUrl = opts.securityCredentialsUrl; + this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Returns the active AWS region. This first checks to see if the region + * is available as an environment variable. If it is not, then the supplier + * will call the region URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS region string. + */ + async getAwsRegion(context) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + } + const metadataHeaders = {}; + if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: metadataHeaders, + }; + const response = await context.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * Returns AWS security credentials. This first checks to see if the credentials + * is available as environment variables. If it is not, then the supplier + * will call the security credentials URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS security credentials. + */ + async getAwsSecurityCredentials(context) { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + } + const metadataHeaders = {}; + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + // Since the role on a VM can change, we don't need to cache it. 
+ const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + } +} +exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; +_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = +/** + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the IMDSv2 Session Token. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = +/** + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = +/** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ +async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { + const response = await transporter.request({ + ...this.additionalGaxiosOptions, + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. 
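A small sketch of the environment-variable fallbacks implemented by these private getters; the availability-zone value is a placeholder for what the metadata endpoint would return.

// Region: AWS_REGION takes precedence over AWS_DEFAULT_REGION; otherwise the
// supplier fetches the availability zone and drops its trailing letter.
const regionFromEnv = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || null;
const zone = 'us-east-2b'; // placeholder metadata response
const regionFromZone = zone.substring(0, zone.length - 1); // 'us-east-2'

// Credentials: both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required;
// AWS_SESSION_TOKEN is optional and only forwarded when present.
const credentialsFromEnv =
  process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY
    ? {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        token: process.env.AWS_SESSION_TOKEN,
      }
    : null;
console.log(regionFromEnv || regionFromZone, credentialsFromEnv !== null);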
+ return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); +}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; +}; + + +/***/ }), + +/***/ 7556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. 
+ * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. 
+ * + * The result has the form: + { Authorization: 'Bearer <access_token>' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * GCP access tokens are retrieved from authclient object/source credential. + * Then GCP access tokens are exchanged for downscoped access tokens via the + * token exchange endpoint. + * @return A promise that resolves with the fresh downscoped access token. + */ + async refreshAccessTokenAsync() { + var _a; + // Retrieve GCP access token from source credential. + const subjectToken = (await this.authClient.getAccessToken()).token; + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken: subjectToken, + subjectTokenType: STS_SUBJECT_TOKEN_TYPE, + }; + // Exchange the source AuthClient access token for a Downscoped access + // token. + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); + /** + * The STS endpoint will only return the expiration time for the downscoped + * access token if the original access token represents a service account. + * The downscoped token's expiration time will always match the source + * credential expiration. When no expires_in is returned, we can copy the + * source credential's expiration time. + */ + const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; + const expiryDate = stsResponse.expires_in + ? 
new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; + + +/***/ }), + +/***/ 963: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCPEnv = void 0; +exports.clear = clear; +exports.getEnv = getEnv; +const gcpMetadata = __nccwpck_require__(3046); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. 
+ * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. + */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} + + +/***/ }), + +/***/ 3247: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. 
+ if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; + + +/***/ }), + +/***/ 4240: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(4810); +const oauth2common_1 = __nccwpck_require__(6653); +const gaxios_1 = __nccwpck_require__(7003); +const stream = __nccwpck_require__(2203); +const baseexternalclient_1 = __nccwpck_require__(142); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +/** + * External Account Authorized User Client. This is used for OAuth2 credentials + * sourced using external identities through Workforce Identity Federation. + * Obtaining the initial access and refresh token can be done through the + * Google Cloud CLI. + */ +class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { + /** + * Instantiates an ExternalAccountAuthorizedUserClient instances using the + * provided JSON object loaded from a credentials files. + * An error is throws if the credential is not valid. + * @param options The external account authorized user option object typically + * from the external accoutn authorized user JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + if (options.universe_domain) { + this.universeDomain = options.universe_domain; + } + this.refreshToken = options.refresh_token; + const clientAuth = { + confidentialClientType: 'basic', + clientId: options.client_id, + clientSecret: options.client_secret, + }; + this.externalAccountAuthorizedUserHandler = + new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); + this.cachedAccessToken = null; + this.quotaProjectId = options.quota_project_id; + // As threshold could be zero, + // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the + // zero value. + if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { + this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; + } + else { + this.eagerRefreshThresholdMillis = additionalOptions + .eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); + } + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. + if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. 
+ * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; + + +/***/ }), + +/***/ 8323: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const identitypoolclient_1 = __nccwpck_require__(9960); +const awsclient_1 = __nccwpck_require__(1261); +const pluggable_auth_client_1 = __nccwpck_require__(6077); +/** + * Dummy class with no constructor. Developers are expected to use fromJSON. + */ +class ExternalAccountClient { + constructor() { + throw new Error('ExternalAccountClients should be initialized via: ' + + 'ExternalAccountClient.fromJSON(), ' + + 'directly via explicit constructors, eg. ' + + 'new AwsClient(options), new IdentityPoolClient(options), new' + + 'PluggableAuthClientOptions, or via ' + + 'new GoogleAuth(options).getClient()'); + } + /** + * This static method will instantiate the + * corresponding type of external account credential depending on the + * underlying credential source. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + * @return A BaseExternalAccountClient instance or null if the options + * provided do not correspond to an external account credential. + */ + static fromJSON(options, additionalOptions) { + var _a, _b; + if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { + return new awsclient_1.AwsClient(options, additionalOptions); + } + else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { + return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); + } + else { + return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); + } + } + else { + return null; + } + } +} +exports.ExternalAccountClient = ExternalAccountClient; + + +/***/ }), + +/***/ 932: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var _a, _b, _c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileSubjectTokenSupplier = void 0; +const util_1 = __nccwpck_require__(9023); +const fs = __nccwpck_require__(9896); +// fs.readfile is undefined in browser karma tests causing +// `npm run browser-test` to fail as test.oauth2.ts imports this file via +// src/index.ts. +// Fallback to void function to avoid promisify throwing a TypeError. +const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); +const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? 
_b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Internal subject token supplier implementation used when a file location + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class FileSubjectTokenSupplier { + /** + * Instantiates a new file based subject token supplier. + * @param opts The file subject token supplier options to build the supplier + * with. + */ + constructor(opts) { + this.filePath = opts.filePath; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + } + /** + * Returns the subject token stored at the file specified in the constructor. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + let parsedFilePath = this.filePath; + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + parsedFilePath = await realpath(parsedFilePath); + if (!(await lstat(parsedFilePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); + if (this.formatType === 'text') { + subjectToken = rawText; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } +} +exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; + + +/***/ }), + +/***/ 5934: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleAuth_instances, _GoogleAuth_pendingAuthClient, _GoogleAuth_prepareAndCacheClient, _GoogleAuth_determineClient; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.GoogleAuthExceptionMessages = exports.CLOUD_SDK_CLIENT_ID = void 0; +const child_process_1 = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +const gcpMetadata = __nccwpck_require__(3046); +const os = __nccwpck_require__(857); +const path = __nccwpck_require__(6928); +const crypto_1 = __nccwpck_require__(8851); +const transporters_1 = __nccwpck_require__(7633); +const computeclient_1 = __nccwpck_require__(977); +const idtokenclient_1 = __nccwpck_require__(2718); +const envDetect_1 = __nccwpck_require__(963); +const jwtclient_1 = __nccwpck_require__(5277); +const refreshclient_1 = __nccwpck_require__(9807); +const impersonated_1 = __nccwpck_require__(9964); +const externalclient_1 = __nccwpck_require__(8323); +const baseexternalclient_1 = __nccwpck_require__(142); +const authclient_1 = __nccwpck_require__(4810); +const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(4240); +const util_1 = __nccwpck_require__(7870); +exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; +exports.GoogleAuthExceptionMessages = { + API_KEY_WITH_CREDENTIALS: 'API Keys and Credentials are mutually exclusive authentication methods and cannot be used together.', + NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + + 'To learn more about authentication and Google APIs, visit: \n' + + 'https://cloud.google.com/docs/authentication/getting-started', + NO_ADC_FOUND: 'Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.', + NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + + 'To learn more about Universe Domain retrieval, visit: \n' + + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', +}; +class GoogleAuth { + // Note: this properly is only public to satisfy unit tests. + // https://github.com/Microsoft/TypeScript/issues/5228 + get isGCE() { + return this.checkIsGCE; + } + /** + * Configuration is resolved in the following order of precedence: + * - {@link GoogleAuthOptions.credentials `credentials`} + * - {@link GoogleAuthOptions.keyFilename `keyFilename`} + * - {@link GoogleAuthOptions.keyFile `keyFile`} + * + * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the + * {@link AuthClient `AuthClient`s}. 
+ * + * @param opts + */ + constructor(opts = {}) { + _GoogleAuth_instances.add(this); + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + /** + * A pending {@link AuthClient}. Used for concurrent {@link GoogleAuth.getClient} calls. + */ + _GoogleAuth_pendingAuthClient.set(this, null); + this.clientOptions = {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.clientOptions = opts.clientOptions || {}; + this.jsonContent = opts.credentials || null; + this.apiKey = opts.apiKey || this.clientOptions.apiKey || null; + // Cannot use both API Key + Credentials + if (this.apiKey && (this.jsonContent || this.clientOptions.credentials)) { + throw new RangeError(exports.GoogleAuthExceptionMessages.API_KEY_WITH_CREDENTIALS); + } + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } + } + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /** + * A private method for finding and caching a projectId. 
+ * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } + } + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe-domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. 
+ if (this.cachedCredential) { + // cache, while preserving existing quota project preferences + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, this.cachedCredential, null); + } + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + options.scopes = this.getAnyScopes(); + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, new computeclient_1.Compute(options)); + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_ADC_FOUND); + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. 
+ if (location) { + location = path.join(location, 'gcloud', 'application_default_credentials.json'); + if (!fs.existsSync(location)) { + location = null; + } + } + // The file does not exist. + if (!location) { + return null; + } + // The file seems to exist. Try to use it. + const client = await this._getApplicationCredentialsFromFilePath(location, options); + return client; + } + /** + * Attempts to load default credentials from a file at the given path.. + * @param filePath The path to the file to read. + * @returns Promise that resolves with the OAuth2Client + * @api private + */ + async _getApplicationCredentialsFromFilePath(filePath, options = {}) { + // Make sure the path looks like a string. + if (!filePath || filePath.length === 0) { + throw new Error('The file path is invalid.'); + } + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + filePath = fs.realpathSync(filePath); + if (!fs.lstatSync(filePath).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + // Now open a read stream on the file, and parse it. + const readStream = fs.createReadStream(filePath); + return this.fromStream(readStream, options); + } + /** + * Create a credentials instance using a given impersonated input options. + * @param json The impersonated input object. + * @returns JWT or UserRefresh Client with data + */ + fromImpersonatedJSON(json) { + var _a, _b, _c, _d; + if (!json) { + throw new Error('Must pass in a JSON object containing an impersonated refresh token'); + } + if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); + } + if (!json.source_credentials) { + throw new Error('The incoming JSON object does not contain a source_credentials field'); + } + if (!json.service_account_impersonation_url) { + throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); + } + const sourceClient = this.fromJSON(json.source_credentials); + if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} + **/ + throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); + } + // Extract service account from service_account_impersonation_url + const targetPrincipal = (_c = (_b = /(?<target>[^/]+):(generateAccessToken|generateIdToken)$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; + if (!targetPrincipal) { + throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); + } + const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; + return new impersonated_1.Impersonated({ + ...json, + sourceClient, + targetPrincipal, + targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], + }); + } + /** + * Create a credentials instance using the given input options. + * This client is not cached. 
+ * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + * + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. 
+ this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + const chunks = []; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => chunks.push(chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(chunks.join('')); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * The created client is not cached. In order to create and cache it use the {@link GoogleAuth.getClient `getClient`} method after first providing an {@link GoogleAuth.apiKey `apiKey`}. + * + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options = {}) { + return new jwtclient_1.JWT({ ...options, apiKey }); + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; + } + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); + } + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. 
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } + } + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); + } + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; + } + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. 
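+ *
+ * @example
+ * Editor's illustrative sketch (not part of the upstream docs); assumes
+ * Application Default Credentials are available in the environment:
+ * ```
+ * const auth = new GoogleAuth({
+ *   scopes: 'https://www.googleapis.com/auth/cloud-platform',
+ * });
+ * const client = await auth.getClient();
+ * const token = await auth.getAccessToken();
+ * ```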
+ */ + async getClient() { + if (this.cachedCredential) { + return this.cachedCredential; + } + // Use an existing auth client request, or cache a new one + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f") || __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_determineClient).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f"); + } + finally { + // reset the pending auth client in case it is changed later + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, null, "f"); + } + } + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); + } + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; + } + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); + } + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; + } + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; + } + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + retry: true, + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + return res.data.signedBlob; + } +} +exports.GoogleAuth = GoogleAuth; +_GoogleAuth_pendingAuthClient = new WeakMap(), _GoogleAuth_instances = new WeakSet(), _GoogleAuth_prepareAndCacheClient = async function _GoogleAuth_prepareAndCacheClient(credential, quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'] || null) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; +}, _GoogleAuth_determineClient = async function _GoogleAuth_determineClient() { + if (this.jsonContent) { + return this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + return await this.fromStreamAsync(stream, this.clientOptions); + } + else if (this.apiKey) { + const client = await this.fromAPIKey(this.apiKey, this.clientOptions); + client.scopes = this.scopes; + const { credential } = await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, client); + return credential; + } + else { + const { credential } = await this.getApplicationDefaultAsync(this.clientOptions); + return credential; + } +}; +/** + * Export DefaultTransporter as a static property of the class. + */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; + + +/***/ }), + +/***/ 7009: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. 
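+ *
+ * (Editor's sketch, not upstream docs) Both values are supplied by the
+ * caller; the class simply echoes them back as request headers:
+ * ```
+ * const iam = new IAMAuth('my-authority-selector', 'my-token'); // hypothetical values
+ * const headers = iam.getRequestHeaders();
+ * // { 'x-goog-iam-authority-selector': ..., 'x-goog-iam-authorization-token': ... }
+ * ```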
+ * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } +} +exports.IAMAuth = IAMAuth; + + +/***/ }), + +/***/ 9960: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdentityPoolClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const util_1 = __nccwpck_require__(7870); +const filesubjecttokensupplier_1 = __nccwpck_require__(932); +const urlsubjecttokensupplier_1 = __nccwpck_require__(4627); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const subjectTokenSupplier = opts.get('subject_token_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !subjectTokenSupplier) { + throw new Error('A credential source or subject token supplier must be specified.'); + } + if (credentialSource && subjectTokenSupplier) { + throw new Error('Only one of credential source or subject token supplier can be specified.'); + } + if (subjectTokenSupplier) { + this.subjectTokenSupplier = subjectTokenSupplier; + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. 
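+ // (Editor's note, not upstream code) For orientation, a file-sourced
+ // credential_source in the external account JSON typically looks like
+ // (abbreviated, values hypothetical):
+ //   "credential_source": {
+ //     "file": "/var/run/secrets/tokens/gcp-token",
+ //     "format": { "type": "text" }
+ //   }
+ // A url-sourced variant replaces "file" with "url" (plus optional "headers").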
+ const formatType = formatOpts.get('type') || 'text'; + const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (formatType !== 'json' && formatType !== 'text') { + throw new Error(`Invalid credential_source format "${formatType}"`); + } + if (formatType === 'json' && !formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + const file = credentialSourceOpts.get('file'); + const url = credentialSourceOpts.get('url'); + const headers = credentialSourceOpts.get('headers'); + if (file && url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + else if (file && !url) { + this.credentialSourceType = 'file'; + this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ + filePath: file, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + }); + } + else if (!file && url) { + this.credentialSourceType = 'url'; + this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ + url: url, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + headers: headers, + additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, + }); + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + } + } + /** + * Triggered when an external subject token needs to be exchanged for a GCP + * access token via the GCP STS endpoint. Gets a subject token by calling + * the configured {@link SubjectTokenSupplier}. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); + } +} +exports.IdentityPoolClient = IdentityPoolClient; + + +/***/ }), + +/***/ 2718: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(91); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server.
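+ * (Editor's sketch, not upstream docs) Instances are normally obtained via
+ * GoogleAuth#getIdTokenClient rather than constructed directly:
+ * ```
+ * const auth = new GoogleAuth();
+ * // target audience below is hypothetical
+ * const client = await auth.getIdTokenClient('https://my-service.example.com');
+ * const res = await client.request({ url: 'https://my-service.example.com' });
+ * ```
+ *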
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } + } +} +exports.IdTokenClient = IdTokenClient; + + +/***/ }), + +/***/ 9964: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const gaxios_1 = __nccwpck_require__(7003); +const util_1 = __nccwpck_require__(7870); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allow credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the originating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant.
+ * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + const usingExplicitUniverseDomain = !!(0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (!usingExplicitUniverseDomain) { + // override the default universe with the source's universe + this.universeDomain = this.sourceClient.universeDomain; + } + else if (this.sourceClient.universeDomain !== this.universeDomain) { + // non-default universe and is not matching the source - this could be a credential leak + throw new RangeError(`Universe domain ${this.sourceClient.universeDomain} in source credentials does not match ${this.universeDomain} universe domain set for impersonated credentials.`); + } + this.endpoint = + (_f = options.endpoint) !== null && _f !== void 0 ? _f : `https://iamcredentials.${this.universeDomain}`; + } + /** + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * + * @returns A {@link SignBlobResponse} denoting the keyID and signedBlob in base64 string + */ + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. 
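+ * (Editor's sketch, not upstream docs) This method calls the IAM Credentials
+ * generateAccessToken endpoint; it is normally exercised indirectly, e.g.:
+ * ```
+ * const sourceClient = await new GoogleAuth().getClient();
+ * const impersonated = new Impersonated({
+ *   sourceClient,
+ *   targetPrincipal: 'target-sa@my-project.iam.gserviceaccount.com', // hypothetical
+ *   targetScopes: ['https://www.googleapis.com/auth/cloud-platform'],
+ * });
+ * const token = await impersonated.getAccessToken();
+ * ```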
+ */ + async refreshToken() { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; + } + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the options for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a, _b; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + useEmailAzp: (_b = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _b !== void 0 ? _b : true, + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } +} +exports.Impersonated = Impersonated; + + +/***/ }), + +/***/ 7060: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
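+
+// (Editor's note, not upstream code) The JWTAccess class below mints
+// self-signed JWTs instead of calling the OAuth token endpoint. A hedged
+// usage sketch, assuming a service account email and private key are at hand:
+//
+//   const access = new JWTAccess('sa@my-project.iam.gserviceaccount.com', privateKeyPem); // values hypothetical
+//   const headers = access.getRequestHeaders('https://pubsub.googleapis.com/');
+//   // -> { Authorization: 'Bearer <signed JWT>' }
+//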
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(3324); +const util_1 = __nccwpck_require__(7870); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? 
{ ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; + + +/***/ }), + +/***/ 5277: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(8568); +const jwtaccess_1 = __nccwpck_require__(7060); +const oauth2client_1 = __nccwpck_require__(91); +const authclient_1 = __nccwpck_require__(4810); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? 
optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); + } + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. 
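+ * (Editor's note, not upstream docs) "User" scopes are whatever the caller
+ * passed at construction, e.g. in a hedged sketch:
+ * ```
+ * const jwt = new JWT({
+ *   email: 'sa@my-project.iam.gserviceaccount.com', // hypothetical
+ *   key: privateKeyPem, // placeholder PEM string
+ *   scopes: ['https://www.googleapis.com/auth/cloud-platform'],
+ * });
+ * ```
+ * as opposed to library-supplied defaultScopes (see hasAnyScopes below).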
+ */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + * + * @remarks + * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
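+ // (Editor's note, not upstream code) The fields read below map onto a
+ // standard service account key file, abbreviated and with hypothetical
+ // values:
+ //   { "type": "service_account", "project_id": "my-project",
+ //     "client_email": "sa@my-project.iam.gserviceaccount.com",
+ //     "private_key": "-----BEGIN PRIVATE KEY-----\n...",
+ //     "private_key_id": "abc123", "universe_domain": "googleapis.com" }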
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; + + +/***/ }), + +/***/ 3882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. 
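+ * (Editor's sketch, not upstream docs) A LoginTicket is what
+ * OAuth2Client#verifyIdToken resolves with:
+ * ```
+ * const ticket = await oauth2Client.verifyIdToken({ idToken, audience: CLIENT_ID }); // CLIENT_ID is hypothetical
+ * const payload = ticket.getPayload();
+ * const userId = payload.sub;
+ * ```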
+ * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; + + +/***/ }), + +/***/ 91: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuth2Client = exports.ClientAuthentication = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const stream = __nccwpck_require__(2203); +const formatEcdsa = __nccwpck_require__(325); +const crypto_1 = __nccwpck_require__(8851); +const authclient_1 = __nccwpck_require__(4810); +const loginticket_1 = __nccwpck_require__(3882); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +/** + * The client authentication type. Supported values are basic, post, and none. + * https://datatracker.ietf.org/doc/html/rfc7591#section-2 + */ +var ClientAuthentication; +(function (ClientAuthentication) { + ClientAuthentication["ClientSecretPost"] = "ClientSecretPost"; + ClientAuthentication["ClientSecretBasic"] = "ClientSecretBasic"; + ClientAuthentication["None"] = "None"; +})(ClientAuthentication || (exports.ClientAuthentication = ClientAuthentication = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? 
optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.clientAuthentication = + opts.clientAuthentication || ClientAuthentication.ClientSecretPost; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const values = { + client_id: options.client_id || this._clientId, + code_verifier: options.codeVerifier, + code: options.code, + grant_type: 'authorization_code', + redirect_uri: options.redirect_uri || this.redirectUri, + }; + if (this.clientAuthentication === ClientAuthentication.ClientSecretBasic) { + const basic = Buffer.from(`${this._clientId}:${this._clientSecret}`); + headers['Authorization'] = `Basic ${basic.toString('base64')}`; + } + if (this.clientAuthentication === ClientAuthentication.ClientSecretPost) { + values.client_secret = this._clientSecret; + } + const res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(values), + headers, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + ...OAuth2Client.RETRY_CONFIG, + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, reAuthRetried = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. 
+ // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. 
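+ *
+ * @example
+ * Editor's illustrative sketch (not from the upstream docs):
+ * ```
+ * const { token } = await oauth2Client.getAccessToken();
+ * const info = await oauth2Client.getTokenInfo(token);
+ * // info.scopes, info.expiry_date, ...
+ * ```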
+ */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. 
+ * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if 
(!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; + + +/***/ }), + +/***/ 6653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuthClientAuthHandler = void 0; +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; +const querystring = __nccwpck_require__(3480); +const crypto_1 = __nccwpck_require__(8851); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. 
+ */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. 
+ */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} + + +/***/ }), + +/***/ 2045: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PassThroughClient = void 0; +const authclient_1 = __nccwpck_require__(4810); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + async request(opts) { + return this.transporter.request(opts); + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getAccessToken() { + return {}; + } + /** + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} + */ + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); + + +/***/ }), + +/***/ 6077: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const executable_response_1 = __nccwpck_require__(3247); +const pluggable_auth_handler_1 = __nccwpck_require__(908); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ *
+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration.
+ *
+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *

Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(6077); +const executable_response_1 = __nccwpck_require__(3247); +const childProcess = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. 
+ */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. 
+ if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 9807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const querystring_1 = __nccwpck_require__(3480); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
+ */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + async fetchIdToken(targetAudience) { + const res = await this.transporter.request({ + ...UserRefreshClient.RETRY_CONFIG, + url: this.endpoints.oauth2TokenUrl, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + data: (0, querystring_1.stringify)({ + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + refresh_token: this._refreshToken, + target_audience: targetAudience, + }), + }); + return res.data.id_token; + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + static fromJSON(json) { + const client = new UserRefreshClient(); + client.fromJSON(json); + return client; + } +} +exports.UserRefreshClient = UserRefreshClient; + + +/***/ }), + +/***/ 121: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
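Editor's note: a minimal sketch of how the UserRefreshClient defined above is typically driven. It assumes UserRefreshClient is in scope (it is re-exported from the bundle's index module further below); the credential file path and target URL are placeholders, and error handling is omitted.

// Illustrative only: build a UserRefreshClient from an "authorized_user" JSON
// blob (client_id / client_secret / refresh_token) and issue one request.
const fs = require('fs');

async function callApiWithUserCredential(credentialPath, url) {
  // fromJSON() validates the "authorized_user" type and the required fields,
  // as implemented in the UserRefreshClient code above.
  const json = JSON.parse(fs.readFileSync(credentialPath, 'utf8'));
  const client = UserRefreshClient.fromJSON(json);
  // request() (inherited from OAuth2Client) refreshes the access token on
  // demand and attaches the Authorization header before delegating to gaxios.
  const res = await client.request({ url }); // url is a placeholder API endpoint
  return res.data;
}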
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const transporters_1 = __nccwpck_require__(7633); +const oauth2common_1 = __nccwpck_require__(6653); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + ...StsCredentials.RETRY_CONFIG, + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; + + +/***/ }), + +/***/ 4627: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UrlSubjectTokenSupplier = void 0; +/** + * Internal subject token supplier implementation used when a URL + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class UrlSubjectTokenSupplier { + /** + * Instantiates a URL subject token supplier. + * @param opts The URL subject token supplier options to build the supplier with. + */ + constructor(opts) { + this.url = opts.url; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + this.headers = opts.headers; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Sends a GET request to the URL provided in the constructor and resolves + * with the returned external subject token. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.url, + method: 'GET', + headers: this.headers, + responseType: this.formatType, + }; + let subjectToken; + if (this.formatType === 'text') { + const response = await context.transporter.request(opts); + subjectToken = response.data; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const response = await context.transporter.request(opts); + subjectToken = response.data[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; + + +/***/ }), + +/***/ 3438: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
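Editor's note: a hedged sketch of driving the StsCredentials class defined above directly. The token exchange endpoint, audience, scope, and subject token type are placeholder assumptions; the grant-type and token-type URNs come from RFC 8693, which this class implements.

// Illustrative only: exchange an externally obtained token for a Google-style
// access token via StsCredentials.exchangeToken() shown above.
async function exchangeSubjectToken(subjectToken, audience) {
  // Assumed endpoint; no client authentication credentials are passed here.
  const sts = new StsCredentials('https://sts.googleapis.com/v1/token');
  const response = await sts.exchangeToken({
    grantType: 'urn:ietf:params:oauth:grant-type:token-exchange',
    audience, // e.g. a workload identity pool provider resource name
    requestedTokenType: 'urn:ietf:params:oauth:token-type:access_token',
    subjectToken,
    subjectTokenType: 'urn:ietf:params:oauth:token-type:jwt', // example type
    scope: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  // The resolved object is the raw STS response body (RFC 8693 shape),
  // with the GaxiosResponse attached as `res` by the code above.
  return response.access_token;
}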
+/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(8793); +const crypto_1 = __nccwpck_require__(8851); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. 
+ const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; + + +/***/ }), + +/***/ 8851: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCrypto = createCrypto; +exports.hasBrowserCrypto = hasBrowserCrypto; +exports.fromArrayBufferToHex = fromArrayBufferToHex; +const crypto_1 = __nccwpck_require__(3438); +const crypto_2 = __nccwpck_require__(7388); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} + + +/***/ }), + +/***/ 7388: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
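Editor's note: a small sketch of the shared crypto surface. createCrypto() above returns BrowserCrypto when SubtleCrypto is available and otherwise the Node implementation defined just below; both expose the same methods. It assumes createCrypto and fromArrayBufferToHex are in scope (they are exported by the crypto module above); the input strings are arbitrary examples.

// Illustrative only: environment-agnostic hashing and base64 helpers.
async function demoCrypto() {
  const cryptoImpl = createCrypto(); // BrowserCrypto or NodeCrypto
  const hex = await cryptoImpl.sha256DigestHex('hello world');
  const b64 = cryptoImpl.encodeBase64StringUtf8('hello world');
  const roundTripped = cryptoImpl.decodeBase64StringUtf8(b64);
  const nonce = cryptoImpl.randomBytesBase64(16); // 16 random bytes, base64
  // signWithHmacSha256 resolves with an ArrayBuffer; convert it to hex.
  const hmacHex = fromArrayBufferToHex(
    await cryptoImpl.signWithHmacSha256('secret-key', 'message')
  );
  return { hex, b64, roundTripped, nonce, hmacHex };
}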
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6982); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + + +/***/ }), + +/***/ 492: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.ExecutableError = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsRequestSigner = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.ClientAuthentication = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(5934); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +// Export common deps to ensure types/instances are the exact match. Useful +// for consistently configuring the library across versions. +exports.gcpMetadata = __nccwpck_require__(3046); +exports.gaxios = __nccwpck_require__(7003); +var authclient_1 = __nccwpck_require__(4810); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(977); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(963); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(7009); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(2718); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(7060); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(5277); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(9964); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(91); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +Object.defineProperty(exports, "ClientAuthentication", ({ enumerable: true, get: function () { return oauth2client_1.ClientAuthentication; } })); +var loginticket_1 = __nccwpck_require__(3882); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(9807); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(1261); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var awsrequestsigner_1 = __nccwpck_require__(7647); +Object.defineProperty(exports, "AwsRequestSigner", ({ enumerable: true, get: function () { return awsrequestsigner_1.AwsRequestSigner; } })); +var identitypoolclient_1 = 
__nccwpck_require__(9960); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(8323); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(142); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(7556); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(6077); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +Object.defineProperty(exports, "ExecutableError", ({ enumerable: true, get: function () { return pluggable_auth_client_1.ExecutableError; } })); +var passthrough_1 = __nccwpck_require__(2045); +Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); +var transporters_1 = __nccwpck_require__(7633); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 8290: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validate = validate; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} + + +/***/ }), + +/***/ 7633: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const options_1 = __nccwpck_require__(8290); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = + `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } + } + return opts; + } + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + + +/***/ }), + +/***/ 7870: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LRUCache = void 0; +exports.snakeToCamel = snakeToCamel; +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; + } + return { get }; +} +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; + } + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. 
+ * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; + } +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; + + +/***/ }), + +/***/ 1628: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Colours = void 0; +/** + * Handles figuring out if we can use ANSI colours and handing out the escape codes. + * + * This is for package-internal use only, and may change at any time. + * + * @private + * @internal + */ +class Colours { + /** + * @param stream The stream (e.g. process.stderr) + * @returns true if the stream should have colourization enabled + */ + static isEnabled(stream) { + return (stream.isTTY && + (typeof stream.getColorDepth === 'function' + ? 
stream.getColorDepth() > 2 + : true)); + } + static refresh() { + Colours.enabled = Colours.isEnabled(process.stderr); + if (!this.enabled) { + Colours.reset = ''; + Colours.bright = ''; + Colours.dim = ''; + Colours.red = ''; + Colours.green = ''; + Colours.yellow = ''; + Colours.blue = ''; + Colours.magenta = ''; + Colours.cyan = ''; + Colours.white = ''; + Colours.grey = ''; + } + else { + Colours.reset = '\u001b[0m'; + Colours.bright = '\u001b[1m'; + Colours.dim = '\u001b[2m'; + Colours.red = '\u001b[31m'; + Colours.green = '\u001b[32m'; + Colours.yellow = '\u001b[33m'; + Colours.blue = '\u001b[34m'; + Colours.magenta = '\u001b[35m'; + Colours.cyan = '\u001b[36m'; + Colours.white = '\u001b[37m'; + Colours.grey = '\u001b[90m'; + } + } +} +exports.Colours = Colours; +Colours.enabled = false; +Colours.reset = ''; +Colours.bright = ''; +Colours.dim = ''; +Colours.red = ''; +Colours.green = ''; +Colours.yellow = ''; +Colours.blue = ''; +Colours.magenta = ''; +Colours.cyan = ''; +Colours.white = ''; +Colours.grey = ''; +Colours.refresh(); +//# sourceMappingURL=colours.js.map + +/***/ }), + +/***/ 1577: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(4788), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4788: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021-2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.env = exports.DebugLogBackendBase = exports.placeholder = exports.AdhocDebugLogger = exports.LogSeverity = void 0; +exports.getNodeBackend = getNodeBackend; +exports.getDebugBackend = getDebugBackend; +exports.getStructuredBackend = getStructuredBackend; +exports.setBackend = setBackend; +exports.log = log; +const node_events_1 = __nccwpck_require__(8474); +const process = __importStar(__nccwpck_require__(1708)); +const util = __importStar(__nccwpck_require__(7975)); +const colours_1 = __nccwpck_require__(1628); +// Some functions (as noted) are based on the Node standard library, from +// the following file: +// +// https://github.com/nodejs/node/blob/main/lib/internal/util/debuglog.js +/** + * This module defines an ad-hoc debug logger for Google Cloud Platform + * client libraries in Node. An ad-hoc debug logger is a tool which lets + * users use an external, unified interface (in this case, environment + * variables) to determine what logging they want to see at runtime. This + * isn't necessarily fed into the console, but is meant to be under the + * control of the user. The kind of logging that will be produced by this + * is more like "call retry happened", not "event you'd want to record + * in Cloud Logger". + * + * More for Googlers implementing libraries with it: + * go/cloud-client-logging-design + */ +/** + * Possible log levels. These are a subset of Cloud Observability levels. + * https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity + */ +var LogSeverity; +(function (LogSeverity) { + LogSeverity["DEFAULT"] = "DEFAULT"; + LogSeverity["DEBUG"] = "DEBUG"; + LogSeverity["INFO"] = "INFO"; + LogSeverity["WARNING"] = "WARNING"; + LogSeverity["ERROR"] = "ERROR"; +})(LogSeverity || (exports.LogSeverity = LogSeverity = {})); +/** + * Our logger instance. This actually contains the meat of dealing + * with log lines, including EventEmitter. This contains the function + * that will be passed back to users of the package. + */ +class AdhocDebugLogger extends node_events_1.EventEmitter { + /** + * @param upstream The backend will pass a function that will be + * called whenever our logger function is invoked. + */ + constructor(namespace, upstream) { + super(); + this.namespace = namespace; + this.upstream = upstream; + this.func = Object.assign(this.invoke.bind(this), { + // Also add an instance pointer back to us. + instance: this, + // And pull over the EventEmitter functionality. 
+ on: (event, listener) => this.on(event, listener), + }); + // Convenience methods for log levels. + this.func.debug = (...args) => this.invokeSeverity(LogSeverity.DEBUG, ...args); + this.func.info = (...args) => this.invokeSeverity(LogSeverity.INFO, ...args); + this.func.warn = (...args) => this.invokeSeverity(LogSeverity.WARNING, ...args); + this.func.error = (...args) => this.invokeSeverity(LogSeverity.ERROR, ...args); + this.func.sublog = (namespace) => log(namespace, this.func); + } + invoke(fields, ...args) { + // Push out any upstream logger first. + if (this.upstream) { + this.upstream(fields, ...args); + } + // Emit sink events. + this.emit('log', fields, args); + } + invokeSeverity(severity, ...args) { + this.invoke({ severity }, ...args); + } +} +exports.AdhocDebugLogger = AdhocDebugLogger; +/** + * This can be used in place of a real logger while waiting for Promises or disabling logging. + */ +exports.placeholder = new AdhocDebugLogger('', () => { }).func; +/** + * The base class for debug logging backends. It's possible to use this, but the + * same non-guarantees above still apply (unstable interface, etc). + * + * @private + * @internal + */ +class DebugLogBackendBase { + constructor() { + var _a; + this.cached = new Map(); + this.filters = []; + this.filtersSet = false; + // Look for the Node config variable for what systems to enable. We'll store + // these for the log method below, which will call setFilters() once. + let nodeFlag = (_a = process.env[exports.env.nodeEnables]) !== null && _a !== void 0 ? _a : '*'; + if (nodeFlag === 'all') { + nodeFlag = '*'; + } + this.filters = nodeFlag.split(','); + } + log(namespace, fields, ...args) { + try { + if (!this.filtersSet) { + this.setFilters(); + this.filtersSet = true; + } + let logger = this.cached.get(namespace); + if (!logger) { + logger = this.makeLogger(namespace); + this.cached.set(namespace, logger); + } + logger(fields, ...args); + } + catch (e) { + // Silently ignore all errors; we don't want them to interfere with + // the user's running app. + // e; + console.error(e); + } + } +} +exports.DebugLogBackendBase = DebugLogBackendBase; +// The basic backend. This one definitely works, but it's less feature-filled. +// +// Rather than using util.debuglog, this implements the same basic logic directly. +// The reason for this decision is that debuglog checks the value of the +// NODE_DEBUG environment variable before any user code runs; we therefore +// can't pipe our own enables into it (and util.debuglog will never print unless +// the user duplicates it into NODE_DEBUG, which isn't reasonable). +// +class NodeBackend extends DebugLogBackendBase { + constructor() { + super(...arguments); + // Default to allowing all systems, since we gate earlier based on whether the + // variable is empty. + this.enabledRegexp = /.*/g; + } + isEnabled(namespace) { + return this.enabledRegexp.test(namespace); + } + makeLogger(namespace) { + if (!this.enabledRegexp.test(namespace)) { + return () => { }; + } + return (fields, ...args) => { + var _a; + // TODO: `fields` needs to be turned into a string here, one way or another. 
+ const nscolour = `${colours_1.Colours.green}${namespace}${colours_1.Colours.reset}`; + const pid = `${colours_1.Colours.yellow}${process.pid}${colours_1.Colours.reset}`; + let level; + switch (fields.severity) { + case LogSeverity.ERROR: + level = `${colours_1.Colours.red}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.INFO: + level = `${colours_1.Colours.magenta}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.WARNING: + level = `${colours_1.Colours.yellow}${fields.severity}${colours_1.Colours.reset}`; + break; + default: + level = (_a = fields.severity) !== null && _a !== void 0 ? _a : LogSeverity.DEFAULT; + break; + } + const msg = util.formatWithOptions({ colors: colours_1.Colours.enabled }, ...args); + const filteredFields = Object.assign({}, fields); + delete filteredFields.severity; + const fieldsJson = Object.getOwnPropertyNames(filteredFields).length + ? JSON.stringify(filteredFields) + : ''; + const fieldsColour = fieldsJson + ? `${colours_1.Colours.grey}${fieldsJson}${colours_1.Colours.reset}` + : ''; + console.error('%s [%s|%s] %s%s', pid, nscolour, level, msg, fieldsJson ? ` ${fieldsColour}` : ''); + }; + } + // Regexp patterns below are from here: + // https://github.com/nodejs/node/blob/c0aebed4b3395bd65d54b18d1fd00f071002ac20/lib/internal/util/debuglog.js#L36 + setFilters() { + const totalFilters = this.filters.join(','); + const regexp = totalFilters + .replace(/[|\\{}()[\]^$+?.]/g, '\\$&') + .replace(/\*/g, '.*') + .replace(/,/g, '$|^'); + this.enabledRegexp = new RegExp(`^${regexp}$`, 'i'); + } +} +/** + * @returns A backend based on Node util.debuglog; this is the default. + */ +function getNodeBackend() { + return new NodeBackend(); +} +class DebugBackend extends DebugLogBackendBase { + constructor(pkg) { + super(); + this.debugPkg = pkg; + } + makeLogger(namespace) { + const debugLogger = this.debugPkg(namespace); + return (fields, ...args) => { + // TODO: `fields` needs to be turned into a string here. + debugLogger(args[0], ...args.slice(1)); + }; + } + setFilters() { + var _a; + const existingFilters = (_a = process.env['NODE_DEBUG']) !== null && _a !== void 0 ? _a : ''; + process.env['NODE_DEBUG'] = `${existingFilters}${existingFilters ? ',' : ''}${this.filters.join(',')}`; + } +} +/** + * Creates a "debug" package backend. The user must call require('debug') and pass + * the resulting object to this function. + * + * ``` + * setBackend(getDebugBackend(require('debug'))) + * ``` + * + * https://www.npmjs.com/package/debug + * + * Note: Google does not explicitly endorse or recommend this package; it's just + * being provided as an option. + * + * @returns A backend based on the npm "debug" package. + */ +function getDebugBackend(debugPkg) { + return new DebugBackend(debugPkg); +} +/** + * This pretty much works like the Node logger, but it outputs structured + * logging JSON matching Google Cloud's ingestion specs. Rather than handling + * its own output, it wraps another backend. The passed backend must be a subclass + * of `DebugLogBackendBase` (any of the backends exposed by this package will work). + */ +class StructuredBackend extends DebugLogBackendBase { + constructor(upstream) { + var _a; + super(); + this.upstream = (_a = upstream) !== null && _a !== void 0 ? _a : new NodeBackend(); + } + makeLogger(namespace) { + const debugLogger = this.upstream.makeLogger(namespace); + return (fields, ...args) => { + var _a; + const severity = (_a = fields.severity) !== null && _a !== void 0 ? 
_a : LogSeverity.INFO; + const json = Object.assign({ + severity, + message: util.format(...args), + }, fields); + const jsonString = JSON.stringify(json); + debugLogger(fields, jsonString); + }; + } + setFilters() { + this.upstream.setFilters(); + } +} +/** + * Creates a "structured logging" backend. This pretty much works like the + * Node logger, but it outputs structured logging JSON matching Google + * Cloud's ingestion specs instead of plain text. + * + * ``` + * setBackend(getStructuredBackend()) + * ``` + * + * @param upstream If you want to use something besides the Node backend to + * write the actual log lines into, pass that here. + * @returns A backend based on Google Cloud structured logging. + */ +function getStructuredBackend(upstream) { + return new StructuredBackend(upstream); +} +/** + * The environment variables that we standardized on, for all ad-hoc logging. + */ +exports.env = { + /** + * Filter wildcards specific to the Node syntax, and similar to the built-in + * utils.debuglog() environment variable. If missing, disables logging. + */ + nodeEnables: 'GOOGLE_SDK_NODE_LOGGING', +}; +// Keep a copy of all namespaced loggers so users can reliably .on() them. +// Note that these cached functions will need to deal with changes in the backend. +const loggerCache = new Map(); +// Our current global backend. This might be: +let cachedBackend = undefined; +/** + * Set the backend to use for our log output. + * - A backend object + * - null to disable logging + * - undefined for "nothing yet", defaults to the Node backend + * + * @param backend Results from one of the get*Backend() functions. + */ +function setBackend(backend) { + cachedBackend = backend; + loggerCache.clear(); +} +/** + * Creates a logging function. Multiple calls to this with the same namespace + * will produce the same logger, with the same event emitter hooks. + * + * Namespaces can be a simple string ("system" name), or a qualified string + * (system:subsystem), which can be used for filtering, or for "system:*". + * + * @param namespace The namespace, a descriptive text string. + * @returns A function you can call that works similar to console.log(). + */ +function log(namespace, parent) { + // If the enable flag isn't set, do nothing. + const enablesFlag = process.env[exports.env.nodeEnables]; + if (!enablesFlag) { + return exports.placeholder; + } + // This might happen mostly if the typings are dropped in a user's code, + // or if they're calling from JavaScript. + if (!namespace) { + return exports.placeholder; + } + // Handle sub-loggers. + if (parent) { + namespace = `${parent.instance.namespace}:${namespace}`; + } + // Reuse loggers so things like event sinks are persistent. + const existing = loggerCache.get(namespace); + if (existing) { + return existing.func; + } + // Do we have a backend yet? + if (cachedBackend === null) { + // Explicitly disabled. + return exports.placeholder; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. + cachedBackend = getNodeBackend(); + } + // The logger is further wrapped so we can handle the backend changing out. + const logger = (() => { + let previousBackend = undefined; + const newLogger = new AdhocDebugLogger(namespace, (fields, ...args) => { + if (previousBackend !== cachedBackend) { + // Did the user pass a custom backend? + if (cachedBackend === null) { + // Explicitly disabled. + return; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. 
+ cachedBackend = getNodeBackend(); + } + previousBackend = cachedBackend; + } + cachedBackend === null || cachedBackend === void 0 ? void 0 : cachedBackend.log(namespace, fields, ...args); + }); + return newLogger; + })(); + loggerCache.set(namespace, logger); + return logger.func; +} +//# sourceMappingURL=logging-utils.js.map + +/***/ }), + +/***/ 8568: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(9896); +const gaxios_1 = __nccwpck_require__(7003); +const jws = __nccwpck_require__(3324); +const path = __nccwpck_require__(6928); +const util_1 = __nccwpck_require__(9023); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. 
+ * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. 
+ */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 3813: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; + + +/***/ }), + +/***/ 7847: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const once_1 = __importDefault(__nccwpck_require__(8662)); +const agent_base_1 = __nccwpck_require__(2960); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
+ * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. 
+ if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 1970: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(7847)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2960: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(2600)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2600: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 3669: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const tls = __importStar(__nccwpck_require__(4756)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(8894); +const url_1 = __nccwpck_require__(7016); +const parse_proxy_response_1 = __nccwpck_require__(7943); +const debug = (0, debug_1.default)('https-proxy-agent'); +const setServernameFromNonIpHost = (options) => { + if (options.servername === undefined && + options.host && + !net.isIP(options.host)) { + return { + ...options, + servername: options.host, + }; + } + return options; +}; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? 
`[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + return tls.connect({ + ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'), + socket, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7943: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 9598: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(9023); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(6589); +} + + +/***/ }), + +/***/ 6589: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), + +/***/ 6543: +/***/ ((module) => { + +"use strict"; + + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + 
typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; + + +/***/ }), + +/***/ 4826: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var json_stringify = (__nccwpck_require__(3651).stringify); +var json_parse = __nccwpck_require__(3197); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; + + +/***/ }), + +/***/ 3197: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. 
+*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } + } + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. 
+ + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(1259); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. 
+ + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; + + +/***/ }), + +/***/ 3651: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = __nccwpck_require__(1259); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. 
If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. 
+ + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. 
The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. 
+ + return str('', {'': value}); + }; + } +}()); + + +/***/ }), + +/***/ 8622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var bufferEqual = __nccwpck_require__(9732); +var Buffer = (__nccwpck_require__(3058).Buffer); +var crypto = __nccwpck_require__(6982); +var formatEcdsa = __nccwpck_require__(325); +var util = __nccwpck_require__(9023); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function 
createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; + + +/***/ }), + +/***/ 3324: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(8600); +var VerifyStream = __nccwpck_require__(4368); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; 
+exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; + + +/***/ }), + +/***/ 1831: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module, process*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var Stream = __nccwpck_require__(2203); +var util = __nccwpck_require__(9023); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; + + +/***/ }), + +/***/ 8600: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + 
this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; + + +/***/ }), + +/***/ 5126: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(181).Buffer); + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; + + +/***/ }), + +/***/ 4368: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = 
jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; + + /***/ }), /***/ 9829: @@ -51564,6 +71403,137 @@ function populateMaps (extensions, types) { } +/***/ }), + +/***/ 4402: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? 
ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; + + +/***/ }), + +/***/ 4900: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +let Mime = __nccwpck_require__(4402); +module.exports = new Mime(__nccwpck_require__(3725), __nccwpck_require__(8548)); + + +/***/ }), + +/***/ 8548: +/***/ ((module) => { + +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["w
bs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"ap
plication/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.m
acroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/
vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-
diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql"
:["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.trans
mit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + +/***/ }), + +/***/ 3725: +/***/ ((module) => { + +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + /***/ }), /***/ 3772: @@ -52518,6 +72488,175 @@ function regExpEscape (s) { } +/***/ }), + +/***/ 744: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + /***/ }), /***/ 6705: @@ -53891,10 +74030,6 @@ function getNodeRequestOptions(request) { agent = agent(parsedURL); } - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - // HTTP-network fetch step 4.2 // chunked encoding is handled by Node.js @@ -53943,6 +74078,20 @@ const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); }; +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + /** * Fetch function * @@ -53974,7 +74123,7 @@ function fetch(url, opts) { let error = new AbortError('The user aborted a request.'); reject(error); if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); + destroyStream(request.body, error); } if (!response || !response.body) return; response.body.emit('error', error); @@ -54015,9 +74164,43 @@ function fetch(url, opts) { req.on('error', function (err) { reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + finalize(); }); + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + req.on('response', function (res) { clearTimeout(reqTimeout); @@ -54089,7 +74272,7 @@ function fetch(url, opts) { size: request.size }; - if (!isDomainOrSubdomain(request.url, locationURL)) { + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { requestOpts.headers.delete(name); } @@ -54182,6 +74365,13 @@ function fetch(url, opts) { response = new Response(body, response_options); resolve(response); }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
+ if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); return; } @@ -54201,6 +74391,44 @@ function fetch(url, opts) { writeToStream(req, request); }); } +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + /** * Redirect code matching * @@ -54221,6 +74449,7 @@ exports.Headers = Headers; exports.Request = Request; exports.Response = Response; exports.FetchError = FetchError; +exports.AbortError = AbortError; /***/ }), @@ -56385,6 +76614,3674 @@ module.exports.implForWrapper = function (wrapper) { +/***/ }), + +/***/ 5560: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(8264) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 5500: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of 
${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.F = codes; + + +/***/ }), + +/***/ 2063: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + + + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = __nccwpck_require__(9274); +var Writable = __nccwpck_require__(8797); +__nccwpck_require__(9598)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 5283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +var Transform = __nccwpck_require__(2337); +__nccwpck_require__(9598)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 9274: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = (__nccwpck_require__(4434).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = __nccwpck_require__(9023); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = __nccwpck_require__(7336); +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +__nccwpck_require__(9598)(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. 
+ if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(4868); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(4659); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} + +/***/ }), + +/***/ 2337: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. 
However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + + +module.exports = Transform; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = __nccwpck_require__(2063); +__nccwpck_require__(9598)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. 
If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} + +/***/ }), + +/***/ 8797: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
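+
+// --- Editorial sketch (not part of the bundled readable-stream source) ---
+// The comment above summarises the Writable contract: supply _write (or a
+// `write` option) and the class below handles the buffering and 'drain'
+// emission. A minimal, hedged example (the `sink` name is hypothetical):
+//
+//   const { Writable } = require('stream');
+//   const sink = new Writable({
+//     write(chunk, encoding, callback) {
+//       // Pretend to flush the chunk somewhere asynchronously; calling the
+//       // callback releases the next buffered write.
+//       setImmediate(callback);
+//     }
+//   });
+//   sink.on('finish', () => console.log('all writes flushed'));
+//   sink.end('last chunk');
+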
+ + + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(4488) +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(9598)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. 
Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
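+
+// --- Editorial sketch (not part of the bundled readable-stream source) ---
+// What the validChunk() helper below accepts, illustrated (byteSink and
+// objSink are hypothetical names):
+//
+//   const { Writable } = require('stream');
+//   const byteSink = new Writable({ write(c, e, cb) { cb(); } });
+//   byteSink.write('text');               // ok: string
+//   byteSink.write(Buffer.from([0x01]));  // ok: Buffer / Uint8Array
+//   // byteSink.write({ n: 1 });          // ERR_INVALID_ARG_TYPE outside objectMode
+//
+//   const objSink = new Writable({ objectMode: true, write(c, e, cb) { cb(); } });
+//   objSink.write({ n: 1 });              // ok in objectMode
+//   // objSink.write(null);               // ERR_STREAM_NULL_VALUES in any mode
+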
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
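+
+// --- Editorial sketch (not part of the bundled readable-stream source) ---
+// The 'drain' event emitted by onwriteDrain() below is how producers recover
+// from write() returning false. A common, hedged pattern (writeMany is a
+// hypothetical helper, not an API of this module):
+//
+//   function writeMany(writable, chunks, done) {
+//     let i = 0;
+//     (function next() {
+//       while (i < chunks.length) {
+//         // write() returns false once the buffer reaches highWaterMark;
+//         // pause and resume on 'drain'.
+//         if (!writable.write(chunks[i++])) return writable.once('drain', next);
+//       }
+//       writable.end(done); // 'finish' fires after all data is flushed
+//     })();
+//   }
+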
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; + +/***/ }), + +/***/ 4868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var finished = __nccwpck_require__(6815); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; + +/***/ }), + +/***/ 7336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if 
(Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = __nccwpck_require__(181), + Buffer = _require.Buffer; +var _require2 = __nccwpck_require__(9023), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); + +/***/ }), + +/***/ 5089: +/***/ ((module) => { + +"use strict"; + + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; + +/***/ }), + +/***/ 6815: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + + + +var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; + +/***/ }), + +/***/ 4659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function 
asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. 
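The `from()` helper being assembled here is readable-stream's port of `Readable.from()`: it wraps a sync or async iterable in an object-mode Readable, and the `reading` flag mentioned in the comment above keeps `_read` from re-entering while an iteration is still pending. A minimal usage sketch against Node's built-in stream module; the `numbers` generator is illustrative and not part of this bundle:

const { Readable } = require('stream');

// Any sync or async iterable works; an async generator stands in for a real source.
async function* numbers() {
  yield 1;
  yield 2;
  yield 3;
}

// Each yielded value becomes one object-mode chunk.
const readable = Readable.from(numbers());
readable.on('data', n => console.log('chunk:', n)); // chunk: 1, chunk: 2, chunk: 3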
+ var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; + + +/***/ }), + +/***/ 6701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + + + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(6815); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; + +/***/ }), + +/***/ 4874: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; + +/***/ }), + +/***/ 3283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(2203); + + +/***/ }), + +/***/ 6131: +/***/ ((module, exports, __nccwpck_require__) => { + +var Stream = __nccwpck_require__(2203); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(9274); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(8797); + exports.Duplex = __nccwpck_require__(2063); + exports.Transform = __nccwpck_require__(2337); + exports.PassThrough = __nccwpck_require__(5283); + exports.finished = __nccwpck_require__(6815); + exports.pipeline = __nccwpck_require__(6701); +} + + +/***/ }), + +/***/ 7842: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const {PassThrough} = __nccwpck_require__(2203); +const extend = __nccwpck_require__(3860); + +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} + +const DEFAULTS = { + objectMode: false, + retries: 2, + + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, + + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, + + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); + + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. 
+ return true; + } + } + }, +}; + +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } + + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + + if (typeof opts === 'function') { + callback = opts; + } + + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } + + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } + + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } + + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } + + function resetStreams() { + delayStream = null; + + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); + + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } + + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); + + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } + + if (streamMode) { + streamResponseHandled = false; + + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. + .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } + + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); + + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } + + setTimeout(makeRequest, nextRetryDelay); + } + + function onResponse(err, response, body) { + // An error such as DNS resolution. 
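For context on how the retries above are paced: the `getNextRetryDelay` helper exported later in this module computes an exponential backoff with up to one second of jitter, capped both by `maxRetryDelay` and by whatever remains of `totalTimeout`. A standalone restatement of that formula as a sketch; `nextDelayMs` is an illustrative name and its defaults mirror the `DEFAULTS` object above:

// delay = min(multiplier^attempt * 1000 + jitter, remaining totalTimeout, maxRetryDelay * 1000)
function nextDelayMs(attempt, elapsedMs = 0,
    { retryDelayMultiplier = 2, maxRetryDelay = 64, totalTimeout = 600 } = {}) {
  const jitter = Math.floor(Math.random() * 1000);
  const calculated = Math.pow(retryDelayMultiplier, attempt) * 1000 + jitter;
  return Math.min(calculated, totalTimeout * 1000 - elapsedMs, maxRetryDelay * 1000);
}

// With the defaults, successive attempts wait roughly 2s, 4s, 8s, ... up to the 64s cap.
console.log([1, 2, 3, 4].map(attempt => nextDelayMs(attempt)));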
+ if (err) { + numNoResponseAttempts++; + + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; + } + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } + + // No more attempts need to be made, just continue on. + if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} + +module.exports = retryRequest; + +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} + +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; + + +/***/ }), + +/***/ 5546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(7084); + +/***/ }), + +/***/ 7084: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +var RetryOperation = __nccwpck_require__(9538); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? 
(Math.random() + 1) + : 1; + + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; + + +/***/ }), + +/***/ 9538: +/***/ ((module) => { + +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } + } + + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + this._timer.unref(); + } + + return true; +}; + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if 
(this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; + + +/***/ }), + +/***/ 3058: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + /***/ }), /***/ 2560: @@ -59607,6 +83504,2463 @@ function coerce (version, options) { } +/***/ }), + +/***/ 1546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var stubs = __nccwpck_require__(3897) + +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this + + var cfg = { + callthrough: true, + calls: 1 + } + + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream +} + +module.exports = StreamEvents + + +/***/ 
}), + +/***/ 4633: +/***/ ((module) => { + +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + var idx = state.bufferIndex || 0 + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { + return state.buffer[idx].length + } + } + + return state.length +} + + +/***/ }), + +/***/ 634: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var Buffer = (__nccwpck_require__(6132).Buffer); +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
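The comment above states what `StringDecoder` exists for: emitting only complete characters even when a multi-byte sequence is split across chunks. A short sketch of that behaviour using Node's public string_decoder module (the three bytes are the UTF-8 encoding of '€'):

const { StringDecoder } = require('string_decoder');
const decoder = new StringDecoder('utf8');

const euro = Buffer.from([0xe2, 0x82, 0xac]); // '€' arriving in two pieces
console.log(decoder.write(euro.slice(0, 1))); // '' (incomplete character is buffered)
console.log(decoder.write(euro.slice(1)));    // '€' (completed by the next chunk)
console.log(decoder.end());                   // '' (nothing left pending)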
+exports.I = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
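As the validation comment above explains, when a non-continuation byte shows up where continuation bytes are expected, the buffered prefix is emitted as a single U+FFFD replacement character rather than being silently dropped. A small illustration with the public API; 0xe2 opens a three-byte sequence and 0x41 is a plain ASCII 'A':

const { StringDecoder } = require('string_decoder');
const d = new StringDecoder('utf8');

console.log(d.write(Buffer.from([0xe2]))); // '' (lead byte buffered, two more bytes expected)
console.log(d.write(Buffer.from([0x41]))); // '\ufffdA' (invalid continuation, prefix replaced)
console.log(d.end());                      // ''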
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} + +/***/ }), + +/***/ 6132: +/***/ ((module, exports, __nccwpck_require__) => { + +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 6496: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if(str==="0") return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. 
+ if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; + +/***/ }), + +/***/ 3897: +/***/ ((module) => { + +"use strict"; + + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} + } + + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached + + return returnVal + } +} + + +/***/ }), + +/***/ 1450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const os = __nccwpck_require__(857); +const tty = __nccwpck_require__(2018); +const hasFlag = __nccwpck_require__(3813); + +const {env} = process; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} + +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else 
{ + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; + + +/***/ }), + +/***/ 7745: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. 
+ * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); + } + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map + +/***/ }), + +/***/ 4003: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(8611); +const https_1 = __nccwpck_require__(5692); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(7016); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; + } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? 
__nccwpck_require__(1970) + : __nccwpck_require__(6518); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map + +/***/ }), + +/***/ 321: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(6705); +const stream_1 = __nccwpck_require__(2203); +const uuid = __nccwpck_require__(8993); +const agents_1 = __nccwpck_require__(4003); +const TeenyStatistics_1 = __nccwpck_require__(7745); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(1546); +class RequestError extends Error { +} +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(3480); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; +} +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7954: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(4446)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4446: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 5299: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(7954); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(7742)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. 
+ let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. + let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 6518: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(5299)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7742: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 8993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + 
return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(4684)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(3142)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(9655)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(880)); + +var _nil = _interopRequireDefault(__nccwpck_require__(194)); + +var _version = _interopRequireDefault(__nccwpck_require__(6257)); + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1400)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 8061: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7966: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7814: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 1118: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 8540: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 1352: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1400: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 4684: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 3142: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _md = _interopRequireDefault(__nccwpck_require__(8061)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 4575: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(1400); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 9655: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(7966)); + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _sha = _interopRequireDefault(__nccwpck_require__(1352)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 1579: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(1118)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6257: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + /***/ }), /***/ 770: @@ -59887,6 +86241,705 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { exports.debug = debug; // for test +/***/ }), + +/***/ 4488: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = __nccwpck_require__(9023).deprecate; + + +/***/ }), + +/***/ 2048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6415)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(1697)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(4676)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9771)); + +var _nil = _interopRequireDefault(__nccwpck_require__(7723)); + +var _version = _interopRequireDefault(__nccwpck_require__(5868)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 216: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7723: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 7879: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 2973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 7597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6415: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 1697: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _md = _interopRequireDefault(__nccwpck_require__(216)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2930: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _sha = _interopRequireDefault(__nccwpck_require__(507)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6200: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(7879)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 5868: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 8264: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + /***/ }), /***/ 8736: @@ -64894,6 +91947,81 @@ exports.debug = debug; // for test }).call(this); +/***/ }), + +/***/ 538: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. 
+ this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. + // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + /***/ }), /***/ 7242: @@ -64911,25 +92039,27 @@ var Inputs; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; - Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action -})(Inputs = exports.Inputs || (exports.Inputs = {})); + Inputs["LookupOnly"] = "lookup-only"; + Inputs["GCSBucket"] = "gcs-bucket"; + Inputs["GCSPathPrefix"] = "gcs-path-prefix"; // Input for cache, restore, save action +})(Inputs || (exports.Inputs = Inputs = {})); var Outputs; (function (Outputs) { Outputs["CacheHit"] = "cache-hit"; Outputs["CachePrimaryKey"] = "cache-primary-key"; Outputs["CacheMatchedKey"] = "cache-matched-key"; // Output from restore action -})(Outputs = exports.Outputs || (exports.Outputs = {})); +})(Outputs || (exports.Outputs = Outputs = {})); var State; (function (State) { State["CachePrimaryKey"] = "CACHE_KEY"; State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); +})(State || (exports.State = State = {})); var Events; (function (Events) { Events["Key"] = "GITHUB_EVENT_NAME"; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; -})(Events = exports.Events || (exports.Events = {})); +})(Events || (exports.Events = Events = {})); exports.RefKey = "GITHUB_REF"; @@ -64956,13 +92086,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -64973,8 +92113,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveRun = exports.saveOnlyRun = exports.saveImpl = void 0; -const cache = __importStar(__nccwpck_require__(5116)); +exports.saveImpl = saveImpl; +exports.saveOnlyRun = saveOnlyRun; +exports.saveRun = saveRun; +const cache = __importStar(__nccwpck_require__(9614)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); const stateProvider_1 = __nccwpck_require__(2879); @@ -65024,7 +92166,6 @@ function saveImpl(stateProvider) { return cacheId; }); } -exports.saveImpl = saveImpl; function saveOnlyRun(earlyExit) { return __awaiter(this, void 0, void 0, function* () { try { @@ -65049,7 +92190,6 @@ function saveOnlyRun(earlyExit) { } }); } -exports.saveOnlyRun = saveOnlyRun; function saveRun(earlyExit) { return __awaiter(this, void 0, void 0, function* () { try { @@ -65071,7 +92211,6 @@ function saveRun(earlyExit) { } }); } -exports.saveRun = saveRun; /***/ }), @@ -65097,13 +92236,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.NullStateProvider = exports.StateProvider = void 0; const core = __importStar(__nccwpck_require__(7484)); @@ -65172,15 +92321,33 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isGhes = isGhes; +exports.isExactKeyMatch = isExactKeyMatch; +exports.logWarning = logWarning; +exports.isValidEvent = isValidEvent; +exports.getInputAsArray = getInputAsArray; +exports.getInputAsInt = getInputAsInt; +exports.getInputAsBool = getInputAsBool; +exports.isGCSAvailable = isGCSAvailable; +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; const cache = __importStar(__nccwpck_require__(5116)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); @@ -65192,25 +92359,21 @@ function isGhes() { const isLocalHost = hostname.endsWith(".LOCALHOST"); return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; } -exports.isGhes = isGhes; function isExactKeyMatch(key, cacheKey) { return !!(cacheKey && cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0); } -exports.isExactKeyMatch = isExactKeyMatch; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); } -exports.logWarning = logWarning; // Cache token authorized for all events that are tied to a ref // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context function isValidEvent() { return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]); } -exports.isValidEvent = isValidEvent; function getInputAsArray(name, options) { return core .getInput(name, options) @@ -65218,7 +92381,6 @@ function getInputAsArray(name, options) { .map(s => s.replace(/^!\s+/, "!").trim()) .filter(x => x !== ""); } -exports.getInputAsArray = getInputAsArray; function getInputAsInt(name, options) { const value = parseInt(core.getInput(name, options)); if (isNaN(value) || value < 0) { @@ -65226,13 +92388,31 @@ function getInputAsInt(name, options) { } return value; } -exports.getInputAsInt = getInputAsInt; function getInputAsBool(name, options) { const result = core.getInput(name, options); return result.toLowerCase() === "true"; } -exports.getInputAsBool = getInputAsBool; +// Check if GCS is configured and available +function isGCSAvailable() { + try { + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + if (!bucket) { + core.info("GCS bucket name not provided, falling back to GitHub 
cache"); + return false; + } + return true; + } + catch (error) { + logWarning(`Failed to check GCS availability: ${error.message}`); + return false; + } +} function isCacheFeatureAvailable() { + // Check if GCS cache is available + if (isGCSAvailable()) { + return true; + } + // Otherwise, check GitHub cache if (cache.isFeatureAvailable()) { return true; } @@ -65244,7 +92424,242 @@ Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."); return false; } -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + + +/***/ }), + +/***/ 9614: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.restoreCache = restoreCache; +exports.saveCache = saveCache; +exports.isFeatureAvailable = isFeatureAvailable; +const core = __importStar(__nccwpck_require__(7484)); +const path = __importStar(__nccwpck_require__(6928)); +const constants_1 = __nccwpck_require__(7242); +const actionUtils_1 = __nccwpck_require__(8270); +const utils = __importStar(__nccwpck_require__(8299)); +const storage_1 = __nccwpck_require__(8525); +const cache = __importStar(__nccwpck_require__(5116)); +const tar_1 = __nccwpck_require__(5321); +const DEFAULT_PATH_PREFIX = "github-cache"; +// Function to initialize GCS client using Application Default Credentials +function getGCSClient() { + try { + core.info("Initializing GCS client"); + return new storage_1.Storage(); + } + catch (error) { + core.warning(`Failed to initialize GCS client: ${error.message}`); + return null; + } +} +function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) { + return __awaiter(this, void 0, void 0, function* () { + // Check if GCS is available + if ((0, actionUtils_1.isGCSAvailable)()) { + try { + const result = yield restoreFromGCS(paths, primaryKey, restoreKeys, options); + if (result) { + core.info(`Cache restored from GCS with key: ${result}`); + return result; + } + core.info("Cache not found in GCS, falling back to GitHub cache"); + } + catch (error) { + core.warning(`Failed to restore from GCS: ${error.message}`); + core.info("Falling back to GitHub cache"); + } + } + // Fall back to GitHub cache + return yield cache.restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + }); +} +function saveCache(paths, key, options, enableCrossOsArchive) { + return __awaiter(this, void 0, void 0, function* () { + if ((0, actionUtils_1.isGCSAvailable)()) { + try { + const result = yield saveToGCS(paths, key); + if (result) { + core.info(`Cache saved to GCS with key: ${key}`); + return result; // Success ID + } + core.warning("Failed to save to GCS, falling back to GitHub cache"); + } + catch (error) { + core.warning(`Failed to save to GCS: ${error.message}`); + core.info("Falling back to GitHub cache"); + } + } + // Fall back to GitHub cache + return yield cache.saveCache(paths, key, options, enableCrossOsArchive); + }); +} +// Function that checks if the cache feature is available (either GCS or GitHub cache) +function isFeatureAvailable() { + return (0, actionUtils_1.isGCSAvailable)() || cache.isFeatureAvailable(); +} +function restoreFromGCS(_paths_1, primaryKey_1) { + return __awaiter(this, arguments, void 0, function* (_paths, // validate paths? 
+ primaryKey, restoreKeys = [], options) { + const storage = getGCSClient(); + if (!storage) { + return undefined; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const keys = [primaryKey, ...restoreKeys]; + const gcsPath = yield findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod); + if (!gcsPath) { + core.info(`No matching cache found`); + return undefined; + } + // If lookup only, just return the key + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info(`Cache found in GCS with key: ${gcsPath}`); + return gcsPath; + } + try { + core.info(`Downloading from GCS: ${bucket}/${gcsPath}`); + const file = storage.bucket(bucket).file(gcsPath); + yield file.download({ destination: archivePath }); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + return gcsPath; + } + catch (error) { + core.warning(`Failed to restore: ${error.message}`); + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function getGCSPath(pathPrefix, key, compressionMethod) { + return `${pathPrefix}/${key}.${utils.getCacheFileName(compressionMethod)}`; +} +function saveToGCS(paths, key) { + return __awaiter(this, void 0, void 0, function* () { + let cacheId = -1; + const storage = getGCSClient(); + if (!storage) { + return cacheId; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + core.info(`Uploading to GCS: ${bucket}/${gcsPath}`); + yield storage.bucket(bucket).upload(archivePath, { + destination: gcsPath, + resumable: true, // this may not be necessary + }); + return 1; + } + catch (error) { + core.warning(`Error creating or uploading cache: ${error.message}`); + return -1; + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod) { + return __awaiter(this, void 0, void 
0, function* () { + for (const key of keys) { + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + if (yield checkFileExists(storage, bucket, gcsPath)) { + core.info(`Found file on bucket: ${bucket} with key: ${gcsPath}`); + return gcsPath; + } + } + return undefined; + }); +} +function checkFileExists(storage, bucket, path) { + return __awaiter(this, void 0, void 0, function* () { + const [exists] = yield storage.bucket(bucket).file(path).exists(); + return exists; + }); +} /***/ }), @@ -65329,6 +92744,30 @@ module.exports = require("net"); /***/ }), +/***/ 8474: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 1708: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:process"); + +/***/ }), + +/***/ 7975: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + /***/ 857: /***/ ((module) => { @@ -65353,6 +92792,14 @@ module.exports = require("punycode"); /***/ }), +/***/ 3480: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + /***/ 2203: /***/ ((module) => { @@ -65385,6 +92832,14 @@ module.exports = require("tls"); /***/ }), +/***/ 2018: +/***/ ((module) => { + +"use strict"; +module.exports = require("tty"); + +/***/ }), + /***/ 7016: /***/ ((module) => { @@ -65407,6 +92862,14404 @@ module.exports = require("util"); "use strict"; module.exports = require("zlib"); +/***/ }), + +/***/ 6637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = __nccwpck_require__(206); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. 
ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. 
+ * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); + + +/***/ }), + +/***/ 2887: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const path = __importStar(__nccwpck_require__(6928)); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const util_1 = __nccwpck_require__(9023); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const util_js_1 = __nccwpck_require__(4557); +const acl_js_1 = __nccwpck_require__(6637); +const file_js_1 = __nccwpck_require__(9975); +const iam_js_1 = __nccwpck_require__(2880); +const notification_js_1 = __nccwpck_require__(8598); +const storage_js_1 = __nccwpck_require__(2848); +const signer_js_1 = __nccwpck_require__(7319); +const stream_1 = __nccwpck_require__(2203); +const url_1 = __nccwpck_require__(7016); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * 
A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). 
+ * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. 
+ */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. 
+ */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. 
+ */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. 
+ * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. 
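+ // Note: this is a read-modify-write (getMetadata followed by setMetadata), so it
+ // is not atomic; as the JSDoc above notes, calling addLifecycleRule multiple times
+ // asynchronously does not guarantee that every rule is applied.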
+ this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime_1.default.getType(destinationFile.name) || undefined; + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + contentEncoding: destinationFile.metadata.contentEncoding, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. 
+ * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_js_1.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. 
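+ *
+ * For example, a minimal sketch (assuming a `bucket` instance as in the examples
+ * below) combining the documented `prefix` and `force` options; the query object
+ * is forwarded to {@link Bucket#getFiles}:
+ *
+ * ```
+ * bucket.deleteFiles({
+ *   prefix: 'images/',
+ *   force: true
+ * }, function(errors) {
+ *   // `errors` is null if every matching file was deleted, otherwise an
+ *   // array of the errors that `force: true` suppressed.
+ * });
+ * ```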
+ * + * File preconditions cannot be passed to this function. It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. 
+ * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. + * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only + * + * This feature is not yet widely-available. + *
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only + * + * This feature is not yet widely-available. + *
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. + * See: https://cloud.google.com/storage/docs/managed-folders + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. 
+ * See: https://cloud.google.com/storage/docs/managed-folders + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
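As a complementary sketch to the callback-style snippet that follows, the same "directory" walk can be expressed with the promise form of `getFiles`, recursing into each entry of `apiResponse.prefixes`. This example is editorial, not part of the bundled library code; the bucket name is hypothetical and, for brevity, it ignores `nextQuery` pagination inside each prefix.

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('albums'); // hypothetical bucket name

// Walk the bucket as if it were a file system, treating '/' as the delimiter.
async function listDir(prefix = '') {
  const [files, , apiResponse] = await bucket.getFiles({
    autoPaginate: false,
    delimiter: '/',
    prefix,
  });
  for (const file of files) {
    console.log('file:', file.name);
  }
  // apiResponse.prefixes lists the "sub-directories" found at this level.
  for (const subdir of apiResponse.prefixes || []) {
    console.log('dir:', subdir);
    await listDir(subdir);
  }
}

listDir().catch(console.error);
```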
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + if (query.fields && + query.autoPaginate && + !query.fields.includes('nextPageToken')) { + query.fields = `${query.fields},nextPageToken`; + } + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.fields) { + const fileInstance = file; + return fileInstance; + } + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
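To make the `action` and `expires` constraints concrete, here is a hedged editorial sketch (not from the bundled code) of requesting a V4 bucket-level signed URL for listing objects, with the expiration kept comfortably inside the documented 7-day V4 maximum. The bucket name is hypothetical.

```js
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket('albums'); // hypothetical bucket name

async function getListingUrl() {
  const SIX_DAYS_MS = 6 * 24 * 60 * 60 * 1000;
  const [url] = await bucket.getSignedUrl({
    version: 'v4',
    action: 'list', // only listing (HTTP GET) is supported at the bucket level
    expires: Date.now() + SIX_DAYS_MS, // any value accepted by `new Date()`, within 7 days for 'v4'
  });
  return url;
}
```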
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + host: cfg.host, + signingEndpoint: cfg.signingEndpoint, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this, undefined, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {object} RestoreOptions Options for Bucket#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/restore#resource| Object resource}. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + */ + /** + * Restores a soft-deleted bucket + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [bucket] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return bucket; + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. 
+ * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. 
+ * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. 
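Because `removeRetentionPeriod` funnels into `setMetadata`, its options object can carry the bucket precondition options mentioned below. A hedged editorial sketch (bucket name hypothetical) that guards the change with `ifMetagenerationMatch`:

```js
// Inside an async function, with `bucket` obtained from storage.bucket(...):
// read the current metageneration, then clear the (unlocked) retention policy
// only if no other metadata change has landed in the meantime.
const [metadata] = await bucket.getMetadata();
await bucket.removeRetentionPeriod({
  ifMetagenerationMatch: metadata.metageneration,
});
```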
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. + * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. 
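The bundled example below only sets `maxAgeSeconds`; as a slightly fuller, hedged editorial sketch (the origin is hypothetical), a single rule can combine the other `Cors` fields listed above:

```js
// Inside an async function, with `bucket` obtained from storage.bucket(...):
// replace the bucket's CORS configuration with one rule allowing simple
// cross-origin reads from a single web origin.
await bucket.setCorsConfiguration([
  {
    maxAgeSeconds: 3600,                 // cache preflight responses for an hour
    method: ['GET', 'HEAD'],             // allowed HTTP methods
    origin: ['https://app.example.com'], // hypothetical origin
    responseHeader: ['Content-Type'],    // headers the browser may read
  },
]);
```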
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. 
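Stepping back to the `setStorageClass` implementation above: the chained `replace`/`toUpperCase` calls normalize user-friendly storage class spellings into the API's upper snake case form. A standalone, editorial sketch of that transformation (mirroring the method's regexes, purely for illustration):

```js
// Hyphens become underscores, camelCase boundaries get an underscore,
// then the whole name is upper-cased before being sent as `storageClass`.
const toApiStorageClass = storageClass =>
  storageClass
    .replace(/-/g, '_')
    .replace(/([a-z])([A-Z])/g, (_, low, up) => low + '_' + up)
    .toUpperCase();

console.log(toApiStorageClass('nearline'));       // 'NEARLINE'
console.log(toApiStorageClass('multiRegional'));  // 'MULTI_REGIONAL'
console.log(toApiStorageClass('multi-regional')); // 'MULTI_REGIONAL'
```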
+ * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. + * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. 
+ * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. + * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. + const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? 
errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification', 'restore'], +}); + + +/***/ }), + +/***/ 2658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Channel = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. 
+ */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); + + +/***/ }), + +/***/ 4317: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = __nccwpck_require__(9896); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => { + if (typeof d === 'string') { + crc32c.update(Buffer.from(d)); + } + else { + crc32c.update(d); + } + }) + .on('end', () => resolve()) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; + + +/***/ }), + +/***/ 9975: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
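// ---------------------------------------------------------------------------
// Editorial sketch, not part of the bundled diff: minimal driving of the CRC32C
// class defined in module 4317 above. Assumes CRC32C is re-exported from the
// package entry point; the expected base64 values are the ones quoted in the
// JSDoc examples later in this chunk.
const {CRC32C} = require('@google-cloud/storage');
const crc32c = new CRC32C();            // starts from the zero checksum
crc32c.update(Buffer.from('some '));    // extend the running CRC32C value
crc32c.update(Buffer.from('text\n'));
crc32c.toString();                      // 'DkjKuA==' (base64 of the 4-byte big-endian value)
crc32c.validate('DkjKuA==');            // true: matches the running value
CRC32C.from('DkjKuA==');                // rebuild an equivalent CRC32C from base64 data
// ---------------------------------------------------------------------------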
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +const crypto = __importStar(__nccwpck_require__(6982)); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const resumableUpload = __importStar(__nccwpck_require__(8043)); +const stream_1 = __nccwpck_require__(2203); +const zlib = __importStar(__nccwpck_require__(3106)); +const storage_js_1 = __nccwpck_require__(2848); +const bucket_js_1 = __nccwpck_require__(2887); +const acl_js_1 = __nccwpck_require__(6637); +const signer_js_1 = __nccwpck_require__(7319); +const util_js_1 = __nccwpck_require__(7325); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +const util_js_2 = __nccwpck_require__(4557); +const crc32c_js_1 = __nccwpck_require__(4317); +const hash_stream_validator_js_1 = __nccwpck_require__(4873); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @deprecated - no longer used + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +/** + * @private + * This regex will match compressible content types. These are primarily text/*, +json, +text, +xml content types. + * This was based off of mime-db and may periodically need to be updated if new compressible content types become + * standards. 
+ */ +const COMPRESSIBLE_MIME_REGEX = new RegExp([ + /^text\/|application\/ecmascript|application\/javascript|application\/json/, + /|application\/postscript|application\/rtf|application\/toml|application\/vnd.dart/, + /|application\/vnd.ms-fontobject|application\/wasm|application\/x-httpd-php|application\/x-ns-proxy-autoconfig/, + /|application\/x-sh(?!ockwave-flash)|application\/x-tar|application\/x-virtualbox-hdd|application\/x-virtualbox-ova|application\/x-virtualbox-ovf/, + /|^application\/x-virtualbox-vbox$|application\/x-virtualbox-vdi|application\/x-virtualbox-vhd|application\/x-virtualbox-vmdk/, + /|application\/xml|application\/xml-dtd|font\/otf|font\/ttf|image\/bmp|image\/vnd.adobe.photoshop|image\/vnd.microsoft.icon/, + /|image\/vnd.ms-dds|image\/x-icon|image\/x-ms-bmp|message\/rfc822|model\/gltf-binary|\+json|\+text|\+xml|\+yaml/, +] + .map(r => r.source) + .join(''), 'i'); +class RequestError extends Error { +} +exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +const GS_UTIL_URL_REGEX = /(gs):\/\/([a-z0-9_.-]+)\/(.+)/g; +const HTTPS_PUBLIC_URL_REGEX = /(https):\/\/(storage\.googleapis\.com)\/([a-z0-9_.-]+)\/(.+)/g; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
+ * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. 
+ * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {number} [options.generation] The generation number to get + * @param {string} [options.restoreToken] If this is a soft-deleted object in an HNS-enabled bucket, returns the restore token which will + * be necessary to restore it if there's a name conflict with another object. + * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. + Object `generation` is required if `softDeleted` is set to True. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? 
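+ // Normalize options.generation (string or number) and, when it yields a valid number, record it on the instance as this.generation, matching the requestQueryObject handling earlier in this constructor.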
+ if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. 
+ * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //
Downloading a File
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *
Note
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. 
+ * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
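To make that recommendation concrete, here is a minimal sketch (not from the diff itself) that switches `createWriteStream` between simple and resumable mode based on file size; the bucket name and local path are hypothetical placeholders.

```js
// Minimal sketch: skip the resumable machinery for small files (< 10 MiB).
// 'my-bucket' and './report.csv' are hypothetical placeholders.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const file = new Storage().bucket('my-bucket').file('report.csv');
const TEN_MIB = 10 * 1024 * 1024;
const {size} = fs.statSync('./report.csv');

fs.createReadStream('./report.csv')
  .pipe(file.createWriteStream({
    resumable: size >= TEN_MIB, // simple upload for small files, resumable otherwise
    metadata: {contentType: 'text/csv'},
  }))
  .on('error', err => console.error(err))
  .on('finish', () => console.log('upload complete'));
```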

+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

+ * // <h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

+ * // <h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

+ * // <h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //

+ * // <h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = COMPRESSIBLE_MIME_REGEX.test(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + // If the write stream, which is returned to the caller, catches an error we need to make sure that + // at least one of the streams in the pipeline below gets notified so that they + // all get cleaned up / destroyed. + writeStream.once('error', e => { + emitStream.destroy(e); + }); + // If the write stream is closed, cleanup the pipeline below by calling destroy on one of the streams. + writeStream.once('close', () => { + emitStream.destroy(); + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + receivedData = true; + // We know that the file exists the server - now we can truncate/write to a file + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', (err) => { + callback(err, Buffer.from('')); + }) + .on('finish', () => { + callback(null, data); + }); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * Gets a reference to a Cloud Storage {@link File} file from the provided URL in string format. + * @param {string} publicUrlOrGsUrl the URL as a string. Must be of the format gs://bucket/file + * or https://storage.googleapis.com/bucket/file. + * @param {Storage} storageInstance an instance of a Storage object. + * @param {FileOptions} [options] Configuration options + * @returns {File} + */ + static from(publicUrlOrGsUrl, storageInstance, options) { + const gsMatches = [...publicUrlOrGsUrl.matchAll(GS_UTIL_URL_REGEX)]; + const httpsMatches = [...publicUrlOrGsUrl.matchAll(HTTPS_PUBLIC_URL_REGEX)]; + if (gsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, gsMatches[0][2]); + return new File(bucket, gsMatches[0][3], options); + } + else if (httpsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, httpsMatches[0][3]); + return new File(bucket, httpsMatches[0][4], options); + } + else { + throw new Error('URL string must be of format gs://bucket/file or https://storage.googleapis.com/bucket/file'); + } + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + super + .get(options) + .then(resp => cb(null, ...resp)) + .catch(cb); + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. + */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). 
If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } 
+ conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. 
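As a complement to the built-in example below, here is a minimal Node-side sketch of consuming the returned `url` and `fields` to POST an object. It assumes Node 18+ globals (`fetch`, `FormData`, `Blob`); the bucket name and local path are hypothetical.

```js
// Minimal sketch: upload an object using a V4 signed POST policy.
// Assumes Node 18+ (global fetch/FormData/Blob); names are hypothetical.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const file = new Storage().bucket('my-bucket').file('photo.jpg');

async function main() {
  const [policy] = await file.generateSignedPostPolicyV4({
    expires: Date.now() + 10 * 60 * 1000, // passed to `new Date()`
    fields: {'x-goog-meta-source': 'example'},
  });

  const form = new FormData();
  for (const [name, value] of Object.entries(policy.fields)) {
    form.append(name, value);
  }
  // The object payload must come after the policy fields.
  form.append('file', new Blob([fs.readFileSync('./photo.jpg')]), 'photo.jpg');

  const res = await fetch(policy.url, {method: 'POST', body: form});
  console.log(res.status); // 204 No Content on success by default
}

main().catch(console.error);
```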
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const universe = this.parent.storage.universeDomain; + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (this.storage.customEndpoint) { + url = this.storage.apiEndpoint; + } + else if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.${universe}/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `https://storage.${universe}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + 
}; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. + * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style + * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. 
Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + host: cfg.host, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveFileAtomicResponse + * @property {File} 0 The moved {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveFileAtomicCallback + * @param {?Error} err Request error, if any. + * @param {File} movedFile The moved {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveFileAtomicOptions Configuration options for File#moveFileAtomic(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {object} [preconditionOpts] Precondition options. + * @property {number} [preconditionOpts.ifGenerationMatch] Makes the operation conditional on whether the object's current generation matches the given value. + */ + /** + * Move this file within the same HNS-enabled bucket. + * The source object must exist and be a live object. + * The source and destination object IDs must be different. + * Overwriting the destination object is allowed by default, but can be prevented + * using preconditions. 
+ * If the destination path includes non-existent parent folders, they will be created. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/move| Objects: move API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|File} destination Destination file name or File object within the same bucket.. + * @param {MoveFileAtomicOptions} [options] Configuration options. See an + * @param {MoveFileAtomicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Assume 'my-hns-bucket' is an HNS-enabled bucket. + * //- + * const bucket = storage.bucket('my-hns-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.moveFileAtomic('moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "moved-image.png" + * + * // `movedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // Move the file to a subdirectory, creating parent folders if necessary. + * //- + * file.moveFileAtomic('new-folder/subfolder/moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "new-folder/subfolder/moved-image.png" + * }); + * + * //- + * // Prevent overwriting an existing destination object using preconditions. + * //- + * file.moveFileAtomic('existing-destination.png', { + * preconditionOpts: { + * ifGenerationMatch: 0 // Fails if the destination object exists. + * } + * }, function(err, movedFile, apiResponse) { + * if (err) { + * // Handle the error (e.g., the destination object already exists). + * } else { + * // Move successful. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.moveFileAtomic('moved-image.png).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file_hns + * Another example: + */ + moveFileAtomic(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destName = parsedDestination[2]; + } + else { + destName = destination; + } + } + else if (destination instanceof File) { + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + newFile = newFile || this.bucket.file(destName); + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? 
void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + const query = {}; + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/moveTo/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. 
+ * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). 
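Because `move()` (and therefore `rename()`) is this copy-then-delete composition, the prefixed error message tells you which half failed. A minimal promise-form sketch, with hypothetical bucket and object names:

```js
// Minimal sketch: rename via File#move and inspect which step failed.
// 'my-bucket' and the object names are hypothetical.
const {Storage} = require('@google-cloud/storage');

const bucket = new Storage().bucket('my-bucket');

async function main() {
  try {
    const [renamed] = await bucket.file('drafts/post.md').move('published/post.md');
    console.log(`now stored as ${renamed.name}`);
  } catch (err) {
    // The error message is prefixed with 'file#copy failed...' or
    // 'file#delete failed...', so you can tell whether a duplicate
    // copy may have been left behind.
    console.error(err.message);
  }
}

main().catch(console.error);
```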
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [restoreToken] Returns an option that must be specified when getting a soft-deleted object from an HNS-enabled + * bucket that has a naming and generation conflict with another object in the same bucket. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. 
+ * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [file] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return file; + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
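+ * A minimal sketch of the recommendation above, using the `resumable` option
+ * that this method forwards to {@link File#createWriteStream} (the bucket
+ * name, file name, and payload are illustrative only):
+ *
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const file = storage.bucket('my-bucket').file('small-config.json');
+ *
+ * // The payload is well under 10MB, so skip the resumable session round trip.
+ * file.save(JSON.stringify({retries: 3}), {resumable: false}, err => {
+ *   if (err) {
+ *     console.error('upload failed', err);
+ *   }
+ * });
+ * ```
+ *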
+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || + Buffer.isBuffer(data) || + data instanceof Uint8Array) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const cfg = { + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + let uploadStream; + try { + uploadStream = resumableUpload.upload(cfg); + } + catch (error) { + dup.destroy(error); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + return; + } + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + 'restore', + ], +}); + + +/***/ }), + +/***/ 4873: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashStreamValidator = void 0; +const crypto_1 = __nccwpck_require__(6982); +const stream_1 = __nccwpck_require__(2203); +const crc32c_js_1 = __nccwpck_require__(4317); +const file_js_1 = __nccwpck_require__(9975); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. 
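+ // Put differently: `failed` starts out true whenever a hash was requested,
+ // and is only cleared when an expected value was supplied and test()
+ // confirms the computed digest matches it. A hedged usage sketch, assuming
+ // a readable `source`, a writable `destination`, and an `expectedCrc32c`
+ // value taken from object metadata (all three names are illustrative):
+ //
+ //   const {pipeline} = require('stream');
+ //   const {HashStreamValidator} = require('@google-cloud/storage');
+ //   const validator = new HashStreamValidator({
+ //     crc32c: true,
+ //     crc32cExpected: expectedCrc32c,
+ //   });
+ //   pipeline(source, validator, destination, err => {
+ //     if (err) console.error('integrity check failed', err);
+ //     else console.log('crc32c verified');
+ //   });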
+ let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); + + +/***/ }), + +/***/ 5891: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HmacKey = void 0; +const index_js_1 = __nccwpck_require__(1325); +const storage_js_1 = __nccwpck_require__(2848); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. 
+ * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. 
+ * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); + + +/***/ }), + +/***/ 2880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = __nccwpck_require__(206); +const util_js_1 = __nccwpck_require__(4557); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). 
+ * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. + * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Iam); + + +/***/ }), + +/***/ 8525: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = __nccwpck_require__(1325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); +var storage_js_1 = __nccwpck_require__(2848); +Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); +Object.defineProperty(exports, "RETRYABLE_ERR_FN_DEFAULT", ({ enumerable: true, get: function () { return storage_js_1.RETRYABLE_ERR_FN_DEFAULT; } })); +Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); +var bucket_js_1 = __nccwpck_require__(2887); +Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); +__exportStar(__nccwpck_require__(4317), exports); +var channel_js_1 = __nccwpck_require__(2658); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); +var file_js_1 = __nccwpck_require__(9975); +Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); +__exportStar(__nccwpck_require__(4873), exports); +var hmacKey_js_1 = __nccwpck_require__(5891); +Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); +var iam_js_1 = __nccwpck_require__(2880); +Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); +var notification_js_1 = __nccwpck_require__(8598); +Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); +__exportStar(__nccwpck_require__(4), exports); + + +/***/ }), + +/***/ 1325: +/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; +var service_js_1 = __nccwpck_require__(8542); +Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); +var service_object_js_1 = __nccwpck_require__(2908); +Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); +var util_js_1 = __nccwpck_require__(7325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); +Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); + + +/***/ }), + +/***/ 2908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = __nccwpck_require__(206); +const events_1 = __nccwpck_require__(4434); +const util_js_1 = __nccwpck_require__(7325); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). 
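+ // A few lines below, the `if (config.methods)` block prunes every prototype
+ // method the subclass did not list in `config.methods` (only request* and
+ // getRequestInterceptors are always kept). For example, the HmacKey class
+ // earlier in this bundle passes only delete/get/getMetadata/setMetadata, so
+ // an HmacKey instance ends up with `create` and `exists` set to undefined.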
+ this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
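+ // A hedged sketch of what the 404 branch below enables, assuming a Bucket or
+ // File instance that inherits this delete() unchanged (the variable name is
+ // illustrative):
+ //
+ //   file.delete({ignoreNotFound: true}, err => {
+ //     // err is null here even if the object had already been deleted.
+ //   });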
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); + + +/***/ }), + +/***/ 8542: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const google_auth_library_1 = __nccwpck_require__(492); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + this.universeDomain = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.customEndpoint = config.customEndpoint || false; + this.makeAuthenticatedRequest = util_js_1.util.makeAuthenticatedRequestFactory({ + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + clientOptions: { + universeDomain: options.universeDomain, + ...options.clientOptions, + }, + }); + this.authClient = this.makeAuthenticatedRequest.authClient; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. 
+ return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += + ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; + + +/***/ }), + +/***/ 7325: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = __nccwpck_require__(7635); +const htmlEntities = __importStar(__nccwpck_require__(3660)); +const google_auth_library_1 = __nccwpck_require__(492); +const retry_request_1 = __importDefault(__nccwpck_require__(7842)); +const stream_1 = __nccwpck_require__(2203); +const teeny_request_1 = __nccwpck_require__(321); +const uuid = __importStar(__nccwpck_require__(2048)); +const service_js_1 = __nccwpck_require__(8542); +const util_js_1 = __nccwpck_require__(4557); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. 
+ * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(htmlEntities.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. 
+ * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? 
void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. 
+ */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available + authClient = new google_auth_library_1.GoogleAuth({ + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + clientOptions: googleAutoAuthConfig.clientOptions, + }); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. 
+ return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? 
[{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; + + +/***/ }), + +/***/ 8598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. 
+ * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); + + +/***/ }), + +/***/ 8043: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Upload = exports.PROTOCOL_REGEX = void 0; +exports.upload = upload; +exports.createURI = createURI; +exports.checkUploadStatus = checkUploadStatus; +const abort_controller_1 = __importDefault(__nccwpck_require__(7413)); +const crypto_1 = __nccwpck_require__(6982); +const gaxios = __importStar(__nccwpck_require__(7003)); +const google_auth_library_1 = __nccwpck_require__(492); +const stream_1 = __nccwpck_require__(2203); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(4557); +const util_js_2 = __nccwpck_require__(7325); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + const universe = cfg.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.apiEndpoint = `https://storage.${universe}`; + if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + const hostname = new URL(this.apiEndpoint).hostname; + // check if it is a domain of a known universe + const isDomain = hostname === universe; + const isDefaultUniverseDomain = hostname === google_auth_library_1.DEFAULT_UNIVERSE; + // check if it is a subdomain of a known universe + // by checking a last (universe's length + 1) of a hostname + const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; + const isSubDomainOfDefaultUniverse = hostname.slice(-(google_auth_library_1.DEFAULT_UNIVERSE.length + 1)) === + `.${google_auth_library_1.DEFAULT_UNIVERSE}`; + if (!isDomain && + !isDefaultUniverseDomain && + !isSubDomainOfUniverse && + !isSubDomainOfDefaultUniverse) { + 
// a custom, non-universe domain, + // use gaxios + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + if (typeof cfg.key === 'string') { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + else { + const base64Key = cfg.key.toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
+ */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. 
+ * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. 
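// Illustrative sketch (not part of the bundled code): the 'uri' event emitted just below
// is the hook for persisting the resumable session URI so an interrupted upload can be
// resumed later, mirroring how `cfg.uri` / `uriProvidedManually` are handled in the
// constructor above. This uses the public @google-cloud/storage surface; that
// `createWriteStream()` accepts a previously created `uri` the same way this module does
// is an assumption to verify for your version, and the bucket/object/file names below
// are placeholders.
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');

async function uploadWithReusableSession(bucketName, objectName, localPath) {
  const file = new Storage().bucket(bucketName).file(objectName);
  // Performs the same session-initiating POST as createURIAsync() and resolves with the URI.
  const [sessionUri] = await file.createResumableUpload();
  fs.writeFileSync('session-uri.txt', sessionUri); // persist so a later process can resume
  await new Promise((resolve, reject) => {
    fs.createReadStream(localPath)
      .pipe(file.createWriteStream({uri: sessionUri, resumable: true}))
      .on('error', reject)
      .on('finish', resolve);
  });
}
// End of sketch - the emit below publishes the newly created URI to any listener.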
+ this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
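// Illustrative sketch (not part of the bundled code): the Readable declared below must
// never emit more bytes than were requested for this chunk, which is the bounded pull
// that pullFromChunkBuffer(limit) above implements - an oversized buffer is split and
// the remainder is pushed back onto the queue for the next request. A standalone sketch
// of that logic; `pullUpTo` and the sample queue are illustrative names, not part of
// this module.
function* pullUpTo(buffers, limit) {
  while (limit > 0 && buffers.length) {
    let buf = buffers.shift();
    if (buf.byteLength > limit) {
      buffers.unshift(buf.subarray(limit)); // keep the remainder queued for later
      buf = buf.subarray(0, limit);
    }
    limit -= buf.byteLength;
    yield buf;
  }
}
const queue = [Buffer.from('abc'), Buffer.from('def')];
console.log([...pullUpTo(queue, 4)].map(String)); // [ 'abc', 'd' ]
console.log(queue.map(String));                   // [ 'ef' ] - left for a later request
// End of sketch.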
+ const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. 
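// Illustrative sketch (not part of the bundled code): how the Content-Range header for a
// single chunk request is put together - the ending byte is inclusive (hence the `- 1`
// computed just below) and the total size stays '*' until the final chunk, or a known
// contentLength, pins it down. `contentRangeFor` is an illustrative helper, not part of
// this module.
function contentRangeFor(offset, chunkBytes, totalSize /* a number, or '*' if unknown */) {
  const endingByte = offset + chunkBytes - 1; // inclusive end of this chunk
  return `bytes ${offset}-${endingByte}/${totalSize}`;
}
console.log(contentRangeFor(0, 262144, '*'));         // bytes 0-262143/*
console.log(contentRangeFor(262144, 131072, 393216)); // bytes 262144-393215/393216 (final chunk)
// End of sketch.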
+ const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = + `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
+ this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
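// Illustrative sketch (not part of the bundled code): what checkUploadStatus() above does
// on the wire - an empty-body PUT whose Content-Range is `bytes */*` (the method also pins
// Content-Length to 0). A 308 "Resume Incomplete" reply carries a `Range: bytes=0-N`
// header, so the next byte to send is N + 1, which is how getAndSetOffset() derives
// `this.offset`. Sketch only: assumes Node 18+ global fetch and that `sessionUri` is an
// existing resumable session URI (such URIs act as credentials - keep them secret).
async function committedOffset(sessionUri) {
  const res = await fetch(sessionUri, {
    method: 'PUT',
    headers: {'Content-Range': 'bytes */*'},
  });
  if (res.status === 308) {                   // resumable session still in progress
    const range = res.headers.get('range');   // e.g. "bytes=0-262143"
    return range ? Number(range.split('-')[1]) + 1 : 0; // no Range header -> nothing stored yet
  }
  return null; // 200/201: the object already finished uploading
}
// End of sketch - the validateStatus override below is what lets gaxios surface that 308
// to this class instead of treating it as an error.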
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${JSON.stringify(resp.data)}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error(`Retry limit exceeded - ${JSON.stringify(resp.data)}`)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} + + +/***/ }), + +/***/ 7319: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(__nccwpck_require__(6982)); +const url = __importStar(__nccwpck_require__(7016)); +const storage_js_1 = __nccwpck_require__(2848); +const util_js_1 = __nccwpck_require__(4557); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @deprecated - unused + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(auth, bucket, file, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage = new storage_js_1.Storage()) { + this.auth = auth; + this.bucket = bucket; + this.file = file; + this.storage = storage; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? 
(0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + var _a; + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + var _a; + const auth = this.auth; + try { + const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const credentials = await auth.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + var _a; + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? 
void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + extensionHeaders.host = fqdn.hostname; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + var _a; + const credentials = await this.auth.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. 
+ // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; + + +/***/ }), + +/***/ 2848: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
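// Illustrative sketch (not part of the bundled code): before the Storage class below, a
// standalone version of the header canonicalization URLSigner.getCanonicalHeaders() above
// performs for V4 signing - lowercase the names, sort them, trim each value and collapse
// runs of whitespace, then emit `name:value\n` lines plus the sorted `;`-joined
// signed-header list. `canonicalizeHeaders` is an illustrative stand-in, not an export of
// this module, and it skips the multi-valued (array) header case handled above.
function canonicalizeHeaders(headers) {
  const entries = Object.entries(headers)
    .filter(([, value]) => value !== undefined)
    .map(([name, value]) => [name.toLowerCase(), `${value}`.trim().replace(/\s{2,}/g, ' ')])
    .sort((a, b) => a[0].localeCompare(b[0]));
  return {
    canonicalHeaders: entries.map(([name, value]) => `${name}:${value}\n`).join(''),
    signedHeaders: entries.map(([name]) => name).join(';'),
  };
}
console.log(canonicalizeHeaders({
  Host: 'storage.googleapis.com',
  'Content-Type': '  text/plain ',
  'x-goog-meta-note': 'hello   world',
}));
// -> canonicalHeaders: 'content-type:text/plain\nhost:storage.googleapis.com\nx-goog-meta-note:hello world\n'
//    signedHeaders:    'content-type;host;x-goog-meta-note'
// End of sketch.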
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const stream_1 = __nccwpck_require__(2203); +const bucket_js_1 = __nccwpck_require__(2887); +const channel_js_1 = __nccwpck_require__(2658); +const file_js_1 = __nccwpck_require__(9975); +const util_js_1 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const hmacKey_js_1 = __nccwpck_require__(5891); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. 
+ * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *
<h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const universe = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + let apiEndpoint = `https://storage.${universe}`; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint && options.apiEndpoint !== apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? 
void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. 
+ * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. + * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {object} [hierarchicalNamespace.enabled=false] Specify whether or not to enable hierarchical namespace on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. 
+ * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. + * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + if (body.predefinedAcl) { + query.predefinedAcl = body.predefinedAcl; + delete body.predefinedAcl; + } + if (body.predefinedDefaultObjectAcl) { + query.predefinedDefaultObjectAcl = body.predefinedDefaultObjectAcl; + delete body.predefinedDefaultObjectAcl; + } + if (body.projection) { + query.projection = body.projection; + delete body.projection; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} 
[projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. + * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 3 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with an service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * with which the HMAC key is created for. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API. 
+ * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + * @param {boolean} [softDeleted] If true, returns the soft-deleted object. + * Object `generation` is required if `softDeleted` is set to True. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. 
+ * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); + + +/***/ }), + +/***/ 4: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = __nccwpck_require__(9975); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const path = __importStar(__nccwpck_require__(6928)); +const fs_1 = __nccwpck_require__(9896); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +const fast_xml_parser_1 = __nccwpck_require__(9741); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const crypto_1 = __nccwpck_require__(6982); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. 
+ * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; + } +} +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. 
+ * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = + `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {Function} [customDestinationBuilder] A fuction that will take the current path of a local file + * and return a string representing a custom path to be used to upload the file to GCS. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = options.customDestinationBuilder + ? options.customDestinationBuilder(filePath, options) + : filePath.split(path.sep).join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * @property {boolean} [skipIfExists] Do not download the file if it already exists in + * the destination. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. 
Setting options.prefix or options.stripPrefix + * or options.passthroughOptions.destination will cause the downloaded files to be written to the file system + * instead of being returned as a buffer. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix || passThroughOptionsCopy.destination) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + if (options.skipIfExists && + (0, fs_1.existsSync)(passThroughOptionsCopy.destination || '')) { + continue; + } + promises.push(limit(async () => { + const destination = passThroughOptionsCopy.destination; + if (destination && destination.endsWith(path.sep)) { + await fs_1.promises.mkdir(destination, { recursive: true }); + return Promise.resolve([ + Buffer.alloc(0), + ]); + } + return file.download(passThroughOptionsCopy); + })); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = (0, p_limit_1.default)(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. 
+ * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload opertions. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise a error containing the message, uploadid, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts a queue that will hold only maxQueueSize in memory. 
+ for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} +exports.TransferManager = TransferManager; + + +/***/ }), + +/***/ 4557: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; + } + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function () { + var ownKeys = function (o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +}(); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.PassThroughShim = void 0; +exports.normalize = normalize; +exports.objectEntries = objectEntries; +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +exports.encodeURI = encodeURI; +exports.qsStringify = qsStringify; +exports.objectKeyToLowercase = objectKeyToLowercase; +exports.unicodeJSONStringify = unicodeJSONStringify; +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +exports.formatAsUTCISO = formatAsUTCISO; +exports.getRuntimeTrackingString = getRuntimeTrackingString; +exports.getUserAgentString = getUserAgentString; +exports.getDirName = getDirName; +exports.getModuleFormat = getModuleFormat; +const path = __importStar(__nccwpck_require__(6928)); +const querystring = __importStar(__nccwpck_require__(3480)); +const stream_1 = __nccwpck_require__(2203); +const url = __importStar(__nccwpck_require__(7016)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; +} +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? 
'%2F' : '/'); +} +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {string} url The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); +} +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +/** + * Converts the given objects keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date obect. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment. 
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; + } else { + return `gl-node/${process.versions.node}`; + } +} +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. + */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; +} +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; + } + return dirToUse; +} +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; +} +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. 
+ if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} +exports.PassThroughShim = PassThroughShim; + + +/***/ }), + +/***/ 3660: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encode = encode; +exports.decodeEntity = decodeEntity; +exports.decode = decode; +var named_references_js_1 = __nccwpck_require__(6000); +var numeric_unicode_map_js_1 = __nccwpck_require__(1057); +var surrogate_pairs_js_1 = __nccwpck_require__(9718); +var allNamedReferences = __assign(__assign({}, named_references_js_1.namedReferences), { all: named_references_js_1.namedReferences.html5 }); +var encodeRegExps = { + specialChars: /[<>'"&]/g, + nonAscii: /[<>'"&\u0080-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintable: /[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintableOnly: /[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + extensive: /[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g +}; +var defaultEncodeOptions = { + mode: 'specialChars', + level: 'all', + numeric: 'decimal' +}; +/** Encodes all the necessary (specified by `level`) characters in the text */ +function encode(text, _a) { + var _b = _a === void 0 ? defaultEncodeOptions : _a, _c = _b.mode, mode = _c === void 0 ? 'specialChars' : _c, _d = _b.numeric, numeric = _d === void 0 ? 'decimal' : _d, _e = _b.level, level = _e === void 0 ? 'all' : _e; + if (!text) { + return ''; + } + var encodeRegExp = encodeRegExps[mode]; + var references = allNamedReferences[level].characters; + var isHex = numeric === 'hexadecimal'; + return String.prototype.replace.call(text, encodeRegExp, function (input) { + var result = references[input]; + if (!result) { + var code = input.length > 1 ? (0, surrogate_pairs_js_1.getCodePoint)(input, 0) : input.charCodeAt(0); + result = (isHex ? 
'&#x' + code.toString(16) : '&#' + code) + ';'; + } + return result; + }); +} +var defaultDecodeOptions = { + scope: 'body', + level: 'all' +}; +var strict = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g; +var attribute = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g; +var baseDecodeRegExps = { + xml: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.xml + }, + html4: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html4 + }, + html5: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html5 + } +}; +var decodeRegExps = __assign(__assign({}, baseDecodeRegExps), { all: baseDecodeRegExps.html5 }); +var fromCharCode = String.fromCharCode; +var outOfBoundsChar = fromCharCode(65533); +var defaultDecodeEntityOptions = { + level: 'all' +}; +function getDecodedEntity(entity, references, isAttribute, isStrict) { + var decodeResult = entity; + var decodeEntityLastChar = entity[entity.length - 1]; + if (isAttribute && decodeEntityLastChar === '=') { + decodeResult = entity; + } + else if (isStrict && decodeEntityLastChar !== ';') { + decodeResult = entity; + } + else { + var decodeResultByReference = references[entity]; + if (decodeResultByReference) { + decodeResult = decodeResultByReference; + } + else if (entity[0] === '&' && entity[1] === '#') { + var decodeSecondChar = entity[2]; + var decodeCode = decodeSecondChar == 'x' || decodeSecondChar == 'X' + ? parseInt(entity.substr(3), 16) + : parseInt(entity.substr(2)); + decodeResult = + decodeCode >= 0x10ffff + ? outOfBoundsChar + : decodeCode > 65535 + ? (0, surrogate_pairs_js_1.fromCodePoint)(decodeCode) + : fromCharCode(numeric_unicode_map_js_1.numericUnicodeMap[decodeCode] || decodeCode); + } + } + return decodeResult; +} +/** Decodes a single entity */ +function decodeEntity(entity, _a) { + var _b = _a === void 0 ? defaultDecodeEntityOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c; + if (!entity) { + return ''; + } + return getDecodedEntity(entity, allNamedReferences[level].entities, false, false); +} +/** Decodes all entities in the text */ +function decode(text, _a) { + var _b = _a === void 0 ? defaultDecodeOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c, _d = _b.scope, scope = _d === void 0 ? level === 'xml' ? 
'strict' : 'body' : _d; + if (!text) { + return ''; + } + var decodeRegExp = decodeRegExps[level][scope]; + var references = allNamedReferences[level].entities; + var isAttribute = scope === 'attribute'; + var isStrict = scope === 'strict'; + return text.replace(decodeRegExp, function (entity) { return getDecodedEntity(entity, references, isAttribute, isStrict); }); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6000: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.namedReferences = exports.bodyRegExps = void 0; +// This file is autogenerated by tools/process-named-references.ts +var pairDivider = "~"; +var blockDivider = "~~"; +function generateNamedReferences(input, prev) { + var entities = {}; + var characters = {}; + var blocks = input.split(blockDivider); + var isOptionalBlock = false; + for (var i = 0; blocks.length > i; i++) { + var entries = blocks[i].split(pairDivider); + for (var j = 0; j < entries.length; j += 2) { + var entity = entries[j]; + var character = entries[j + 1]; + var fullEntity = '&' + entity + ';'; + entities[fullEntity] = character; + if (isOptionalBlock) { + entities['&' + entity] = character; + } + characters[character] = fullEntity; + } + isOptionalBlock = true; + } + return prev ? + { entities: __assign(__assign({}, entities), prev.entities), characters: __assign(__assign({}, characters), prev.characters) } : + { entities: entities, characters: characters }; +} +exports.bodyRegExps = { + xml: /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html4: /∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html5: /·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g +}; +exports.namedReferences = {}; +exports.namedReferences.xml = generateNamedReferences("lt~<~gt~>~quot~\"~apos~'~amp~&"); +exports.namedReferences.html4 = 
generateNamedReferences("apos~'~OElig~Œ~oelig~œ~Scaron~Š~scaron~š~Yuml~Ÿ~circ~ˆ~tilde~˜~ensp~ ~emsp~ ~thinsp~ ~zwnj~‌~zwj~‍~lrm~‎~rlm~‏~ndash~–~mdash~—~lsquo~‘~rsquo~’~sbquo~‚~ldquo~“~rdquo~”~bdquo~„~dagger~†~Dagger~‡~permil~‰~lsaquo~‹~rsaquo~›~euro~€~fnof~ƒ~Alpha~Α~Beta~Β~Gamma~Γ~Delta~Δ~Epsilon~Ε~Zeta~Ζ~Eta~Η~Theta~Θ~Iota~Ι~Kappa~Κ~Lambda~Λ~Mu~Μ~Nu~Ν~Xi~Ξ~Omicron~Ο~Pi~Π~Rho~Ρ~Sigma~Σ~Tau~Τ~Upsilon~Υ~Phi~Φ~Chi~Χ~Psi~Ψ~Omega~Ω~alpha~α~beta~β~gamma~γ~delta~δ~epsilon~ε~zeta~ζ~eta~η~theta~θ~iota~ι~kappa~κ~lambda~λ~mu~μ~nu~ν~xi~ξ~omicron~ο~pi~π~rho~ρ~sigmaf~ς~sigma~σ~tau~τ~upsilon~υ~phi~φ~chi~χ~psi~ψ~omega~ω~thetasym~ϑ~upsih~ϒ~piv~ϖ~bull~•~hellip~…~prime~′~Prime~″~oline~‾~frasl~⁄~weierp~℘~image~ℑ~real~ℜ~trade~™~alefsym~ℵ~larr~←~uarr~↑~rarr~→~darr~↓~harr~↔~crarr~↵~lArr~⇐~uArr~⇑~rArr~⇒~dArr~⇓~hArr~⇔~forall~∀~part~∂~exist~∃~empty~∅~nabla~∇~isin~∈~notin~∉~ni~∋~prod~∏~sum~∑~minus~−~lowast~∗~radic~√~prop~∝~infin~∞~ang~∠~and~∧~or~∨~cap~∩~cup~∪~int~∫~there4~∴~sim~∼~cong~≅~asymp~≈~ne~≠~equiv~≡~le~≤~ge~≥~sub~⊂~sup~⊃~nsub~⊄~sube~⊆~supe~⊇~oplus~⊕~otimes~⊗~perp~⊥~sdot~⋅~lceil~⌈~rceil~⌉~lfloor~⌊~rfloor~⌋~lang~〈~rang~〉~loz~◊~spades~♠~clubs~♣~hearts~♥~diams~♦~~nbsp~ ~iexcl~¡~cent~¢~pound~£~curren~¤~yen~¥~brvbar~¦~sect~§~uml~¨~copy~©~ordf~ª~laquo~«~not~¬~shy~­~reg~®~macr~¯~deg~°~plusmn~±~sup2~²~sup3~³~acute~´~micro~µ~para~¶~middot~·~cedil~¸~sup1~¹~ordm~º~raquo~»~frac14~¼~frac12~½~frac34~¾~iquest~¿~Agrave~À~Aacute~Á~Acirc~Â~Atilde~Ã~Auml~Ä~Aring~Å~AElig~Æ~Ccedil~Ç~Egrave~È~Eacute~É~Ecirc~Ê~Euml~Ë~Igrave~Ì~Iacute~Í~Icirc~Î~Iuml~Ï~ETH~Ð~Ntilde~Ñ~Ograve~Ò~Oacute~Ó~Ocirc~Ô~Otilde~Õ~Ouml~Ö~times~×~Oslash~Ø~Ugrave~Ù~Uacute~Ú~Ucirc~Û~Uuml~Ü~Yacute~Ý~THORN~Þ~szlig~ß~agrave~à~aacute~á~acirc~â~atilde~ã~auml~ä~aring~å~aelig~æ~ccedil~ç~egrave~è~eacute~é~ecirc~ê~euml~ë~igrave~ì~iacute~í~icirc~î~iuml~ï~eth~ð~ntilde~ñ~ograve~ò~oacute~ó~ocirc~ô~otilde~õ~ouml~ö~divide~÷~oslash~ø~ugrave~ù~uacute~ú~ucirc~û~uuml~ü~yacute~ý~thorn~þ~yuml~ÿ~quot~\"~amp~&~lt~<~gt~>"); +exports.namedReferences.html5 = 
generateNamedReferences("Abreve~Ă~Acy~А~Afr~𝔄~Amacr~Ā~And~⩓~Aogon~Ą~Aopf~𝔸~ApplyFunction~⁡~Ascr~𝒜~Assign~≔~Backslash~∖~Barv~⫧~Barwed~⌆~Bcy~Б~Because~∵~Bernoullis~ℬ~Bfr~𝔅~Bopf~𝔹~Breve~˘~Bscr~ℬ~Bumpeq~≎~CHcy~Ч~Cacute~Ć~Cap~⋒~CapitalDifferentialD~ⅅ~Cayleys~ℭ~Ccaron~Č~Ccirc~Ĉ~Cconint~∰~Cdot~Ċ~Cedilla~¸~CenterDot~·~Cfr~ℭ~CircleDot~⊙~CircleMinus~⊖~CirclePlus~⊕~CircleTimes~⊗~ClockwiseContourIntegral~∲~CloseCurlyDoubleQuote~”~CloseCurlyQuote~’~Colon~∷~Colone~⩴~Congruent~≡~Conint~∯~ContourIntegral~∮~Copf~ℂ~Coproduct~∐~CounterClockwiseContourIntegral~∳~Cross~⨯~Cscr~𝒞~Cup~⋓~CupCap~≍~DD~ⅅ~DDotrahd~⤑~DJcy~Ђ~DScy~Ѕ~DZcy~Џ~Darr~↡~Dashv~⫤~Dcaron~Ď~Dcy~Д~Del~∇~Dfr~𝔇~DiacriticalAcute~´~DiacriticalDot~˙~DiacriticalDoubleAcute~˝~DiacriticalGrave~`~DiacriticalTilde~˜~Diamond~⋄~DifferentialD~ⅆ~Dopf~𝔻~Dot~¨~DotDot~⃜~DotEqual~≐~DoubleContourIntegral~∯~DoubleDot~¨~DoubleDownArrow~⇓~DoubleLeftArrow~⇐~DoubleLeftRightArrow~⇔~DoubleLeftTee~⫤~DoubleLongLeftArrow~⟸~DoubleLongLeftRightArrow~⟺~DoubleLongRightArrow~⟹~DoubleRightArrow~⇒~DoubleRightTee~⊨~DoubleUpArrow~⇑~DoubleUpDownArrow~⇕~DoubleVerticalBar~∥~DownArrow~↓~DownArrowBar~⤓~DownArrowUpArrow~⇵~DownBreve~̑~DownLeftRightVector~⥐~DownLeftTeeVector~⥞~DownLeftVector~↽~DownLeftVectorBar~⥖~DownRightTeeVector~⥟~DownRightVector~⇁~DownRightVectorBar~⥗~DownTee~⊤~DownTeeArrow~↧~Downarrow~⇓~Dscr~𝒟~Dstrok~Đ~ENG~Ŋ~Ecaron~Ě~Ecy~Э~Edot~Ė~Efr~𝔈~Element~∈~Emacr~Ē~EmptySmallSquare~◻~EmptyVerySmallSquare~▫~Eogon~Ę~Eopf~𝔼~Equal~⩵~EqualTilde~≂~Equilibrium~⇌~Escr~ℰ~Esim~⩳~Exists~∃~ExponentialE~ⅇ~Fcy~Ф~Ffr~𝔉~FilledSmallSquare~◼~FilledVerySmallSquare~▪~Fopf~𝔽~ForAll~∀~Fouriertrf~ℱ~Fscr~ℱ~GJcy~Ѓ~Gammad~Ϝ~Gbreve~Ğ~Gcedil~Ģ~Gcirc~Ĝ~Gcy~Г~Gdot~Ġ~Gfr~𝔊~Gg~⋙~Gopf~𝔾~GreaterEqual~≥~GreaterEqualLess~⋛~GreaterFullEqual~≧~GreaterGreater~⪢~GreaterLess~≷~GreaterSlantEqual~⩾~GreaterTilde~≳~Gscr~𝒢~Gt~≫~HARDcy~Ъ~Hacek~ˇ~Hat~^~Hcirc~Ĥ~Hfr~ℌ~HilbertSpace~ℋ~Hopf~ℍ~HorizontalLine~─~Hscr~ℋ~Hstrok~Ħ~HumpDownHump~≎~HumpEqual~≏~IEcy~Е~IJlig~IJ~IOcy~Ё~Icy~И~Idot~İ~Ifr~ℑ~Im~ℑ~Imacr~Ī~ImaginaryI~ⅈ~Implies~⇒~Int~∬~Integral~∫~Intersection~⋂~InvisibleComma~⁣~InvisibleTimes~⁢~Iogon~Į~Iopf~𝕀~Iscr~ℐ~Itilde~Ĩ~Iukcy~І~Jcirc~Ĵ~Jcy~Й~Jfr~𝔍~Jopf~𝕁~Jscr~𝒥~Jsercy~Ј~Jukcy~Є~KHcy~Х~KJcy~Ќ~Kcedil~Ķ~Kcy~К~Kfr~𝔎~Kopf~𝕂~Kscr~𝒦~LJcy~Љ~Lacute~Ĺ~Lang~⟪~Laplacetrf~ℒ~Larr~↞~Lcaron~Ľ~Lcedil~Ļ~Lcy~Л~LeftAngleBracket~⟨~LeftArrow~←~LeftArrowBar~⇤~LeftArrowRightArrow~⇆~LeftCeiling~⌈~LeftDoubleBracket~⟦~LeftDownTeeVector~⥡~LeftDownVector~⇃~LeftDownVectorBar~⥙~LeftFloor~⌊~LeftRightArrow~↔~LeftRightVector~⥎~LeftTee~⊣~LeftTeeArrow~↤~LeftTeeVector~⥚~LeftTriangle~⊲~LeftTriangleBar~⧏~LeftTriangleEqual~⊴~LeftUpDownVector~⥑~LeftUpTeeVector~⥠~LeftUpVector~↿~LeftUpVectorBar~⥘~LeftVector~↼~LeftVectorBar~⥒~Leftarrow~⇐~Leftrightarrow~⇔~LessEqualGreater~⋚~LessFullEqual~≦~LessGreater~≶~LessLess~⪡~LessSlantEqual~⩽~LessTilde~≲~Lfr~𝔏~Ll~⋘~Lleftarrow~⇚~Lmidot~Ŀ~LongLeftArrow~⟵~LongLeftRightArrow~⟷~LongRightArrow~⟶~Longleftarrow~⟸~Longleftrightarrow~⟺~Longrightarrow~⟹~Lopf~𝕃~LowerLeftArrow~↙~LowerRightArrow~↘~Lscr~ℒ~Lsh~↰~Lstrok~Ł~Lt~≪~Map~⤅~Mcy~М~MediumSpace~ ~Mellintrf~ℳ~Mfr~𝔐~MinusPlus~∓~Mopf~𝕄~Mscr~ℳ~NJcy~Њ~Nacute~Ń~Ncaron~Ň~Ncedil~Ņ~Ncy~Н~NegativeMediumSpace~​~NegativeThickSpace~​~NegativeThinSpace~​~NegativeVeryThinSpace~​~NestedGreaterGreater~≫~NestedLessLess~≪~NewLine~\n~Nfr~𝔑~NoBreak~⁠~NonBreakingSpace~ 
~Nopf~ℕ~Not~⫬~NotCongruent~≢~NotCupCap~≭~NotDoubleVerticalBar~∦~NotElement~∉~NotEqual~≠~NotEqualTilde~≂̸~NotExists~∄~NotGreater~≯~NotGreaterEqual~≱~NotGreaterFullEqual~≧̸~NotGreaterGreater~≫̸~NotGreaterLess~≹~NotGreaterSlantEqual~⩾̸~NotGreaterTilde~≵~NotHumpDownHump~≎̸~NotHumpEqual~≏̸~NotLeftTriangle~⋪~NotLeftTriangleBar~⧏̸~NotLeftTriangleEqual~⋬~NotLess~≮~NotLessEqual~≰~NotLessGreater~≸~NotLessLess~≪̸~NotLessSlantEqual~⩽̸~NotLessTilde~≴~NotNestedGreaterGreater~⪢̸~NotNestedLessLess~⪡̸~NotPrecedes~⊀~NotPrecedesEqual~⪯̸~NotPrecedesSlantEqual~⋠~NotReverseElement~∌~NotRightTriangle~⋫~NotRightTriangleBar~⧐̸~NotRightTriangleEqual~⋭~NotSquareSubset~⊏̸~NotSquareSubsetEqual~⋢~NotSquareSuperset~⊐̸~NotSquareSupersetEqual~⋣~NotSubset~⊂⃒~NotSubsetEqual~⊈~NotSucceeds~⊁~NotSucceedsEqual~⪰̸~NotSucceedsSlantEqual~⋡~NotSucceedsTilde~≿̸~NotSuperset~⊃⃒~NotSupersetEqual~⊉~NotTilde~≁~NotTildeEqual~≄~NotTildeFullEqual~≇~NotTildeTilde~≉~NotVerticalBar~∤~Nscr~𝒩~Ocy~О~Odblac~Ő~Ofr~𝔒~Omacr~Ō~Oopf~𝕆~OpenCurlyDoubleQuote~“~OpenCurlyQuote~‘~Or~⩔~Oscr~𝒪~Otimes~⨷~OverBar~‾~OverBrace~⏞~OverBracket~⎴~OverParenthesis~⏜~PartialD~∂~Pcy~П~Pfr~𝔓~PlusMinus~±~Poincareplane~ℌ~Popf~ℙ~Pr~⪻~Precedes~≺~PrecedesEqual~⪯~PrecedesSlantEqual~≼~PrecedesTilde~≾~Product~∏~Proportion~∷~Proportional~∝~Pscr~𝒫~Qfr~𝔔~Qopf~ℚ~Qscr~𝒬~RBarr~⤐~Racute~Ŕ~Rang~⟫~Rarr~↠~Rarrtl~⤖~Rcaron~Ř~Rcedil~Ŗ~Rcy~Р~Re~ℜ~ReverseElement~∋~ReverseEquilibrium~⇋~ReverseUpEquilibrium~⥯~Rfr~ℜ~RightAngleBracket~⟩~RightArrow~→~RightArrowBar~⇥~RightArrowLeftArrow~⇄~RightCeiling~⌉~RightDoubleBracket~⟧~RightDownTeeVector~⥝~RightDownVector~⇂~RightDownVectorBar~⥕~RightFloor~⌋~RightTee~⊢~RightTeeArrow~↦~RightTeeVector~⥛~RightTriangle~⊳~RightTriangleBar~⧐~RightTriangleEqual~⊵~RightUpDownVector~⥏~RightUpTeeVector~⥜~RightUpVector~↾~RightUpVectorBar~⥔~RightVector~⇀~RightVectorBar~⥓~Rightarrow~⇒~Ropf~ℝ~RoundImplies~⥰~Rrightarrow~⇛~Rscr~ℛ~Rsh~↱~RuleDelayed~⧴~SHCHcy~Щ~SHcy~Ш~SOFTcy~Ь~Sacute~Ś~Sc~⪼~Scedil~Ş~Scirc~Ŝ~Scy~С~Sfr~𝔖~ShortDownArrow~↓~ShortLeftArrow~←~ShortRightArrow~→~ShortUpArrow~↑~SmallCircle~∘~Sopf~𝕊~Sqrt~√~Square~□~SquareIntersection~⊓~SquareSubset~⊏~SquareSubsetEqual~⊑~SquareSuperset~⊐~SquareSupersetEqual~⊒~SquareUnion~⊔~Sscr~𝒮~Star~⋆~Sub~⋐~Subset~⋐~SubsetEqual~⊆~Succeeds~≻~SucceedsEqual~⪰~SucceedsSlantEqual~≽~SucceedsTilde~≿~SuchThat~∋~Sum~∑~Sup~⋑~Superset~⊃~SupersetEqual~⊇~Supset~⋑~TRADE~™~TSHcy~Ћ~TScy~Ц~Tab~\t~Tcaron~Ť~Tcedil~Ţ~Tcy~Т~Tfr~𝔗~Therefore~∴~ThickSpace~  ~ThinSpace~ ~Tilde~∼~TildeEqual~≃~TildeFullEqual~≅~TildeTilde~≈~Topf~𝕋~TripleDot~⃛~Tscr~𝒯~Tstrok~Ŧ~Uarr~↟~Uarrocir~⥉~Ubrcy~Ў~Ubreve~Ŭ~Ucy~У~Udblac~Ű~Ufr~𝔘~Umacr~Ū~UnderBar~_~UnderBrace~⏟~UnderBracket~⎵~UnderParenthesis~⏝~Union~⋃~UnionPlus~⊎~Uogon~Ų~Uopf~𝕌~UpArrow~↑~UpArrowBar~⤒~UpArrowDownArrow~⇅~UpDownArrow~↕~UpEquilibrium~⥮~UpTee~⊥~UpTeeArrow~↥~Uparrow~⇑~Updownarrow~⇕~UpperLeftArrow~↖~UpperRightArrow~↗~Upsi~ϒ~Uring~Ů~Uscr~𝒰~Utilde~Ũ~VDash~⊫~Vbar~⫫~Vcy~В~Vdash~⊩~Vdashl~⫦~Vee~⋁~Verbar~‖~Vert~‖~VerticalBar~∣~VerticalLine~|~VerticalSeparator~❘~VerticalTilde~≀~VeryThinSpace~ 
~Vfr~𝔙~Vopf~𝕍~Vscr~𝒱~Vvdash~⊪~Wcirc~Ŵ~Wedge~⋀~Wfr~𝔚~Wopf~𝕎~Wscr~𝒲~Xfr~𝔛~Xopf~𝕏~Xscr~𝒳~YAcy~Я~YIcy~Ї~YUcy~Ю~Ycirc~Ŷ~Ycy~Ы~Yfr~𝔜~Yopf~𝕐~Yscr~𝒴~ZHcy~Ж~Zacute~Ź~Zcaron~Ž~Zcy~З~Zdot~Ż~ZeroWidthSpace~​~Zfr~ℨ~Zopf~ℤ~Zscr~𝒵~abreve~ă~ac~∾~acE~∾̳~acd~∿~acy~а~af~⁡~afr~𝔞~aleph~ℵ~amacr~ā~amalg~⨿~andand~⩕~andd~⩜~andslope~⩘~andv~⩚~ange~⦤~angle~∠~angmsd~∡~angmsdaa~⦨~angmsdab~⦩~angmsdac~⦪~angmsdad~⦫~angmsdae~⦬~angmsdaf~⦭~angmsdag~⦮~angmsdah~⦯~angrt~∟~angrtvb~⊾~angrtvbd~⦝~angsph~∢~angst~Å~angzarr~⍼~aogon~ą~aopf~𝕒~ap~≈~apE~⩰~apacir~⩯~ape~≊~apid~≋~approx~≈~approxeq~≊~ascr~𝒶~ast~*~asympeq~≍~awconint~∳~awint~⨑~bNot~⫭~backcong~≌~backepsilon~϶~backprime~‵~backsim~∽~backsimeq~⋍~barvee~⊽~barwed~⌅~barwedge~⌅~bbrk~⎵~bbrktbrk~⎶~bcong~≌~bcy~б~becaus~∵~because~∵~bemptyv~⦰~bepsi~϶~bernou~ℬ~beth~ℶ~between~≬~bfr~𝔟~bigcap~⋂~bigcirc~◯~bigcup~⋃~bigodot~⨀~bigoplus~⨁~bigotimes~⨂~bigsqcup~⨆~bigstar~★~bigtriangledown~▽~bigtriangleup~△~biguplus~⨄~bigvee~⋁~bigwedge~⋀~bkarow~⤍~blacklozenge~⧫~blacksquare~▪~blacktriangle~▴~blacktriangledown~▾~blacktriangleleft~◂~blacktriangleright~▸~blank~␣~blk12~▒~blk14~░~blk34~▓~block~█~bne~=⃥~bnequiv~≡⃥~bnot~⌐~bopf~𝕓~bot~⊥~bottom~⊥~bowtie~⋈~boxDL~╗~boxDR~╔~boxDl~╖~boxDr~╓~boxH~═~boxHD~╦~boxHU~╩~boxHd~╤~boxHu~╧~boxUL~╝~boxUR~╚~boxUl~╜~boxUr~╙~boxV~║~boxVH~╬~boxVL~╣~boxVR~╠~boxVh~╫~boxVl~╢~boxVr~╟~boxbox~⧉~boxdL~╕~boxdR~╒~boxdl~┐~boxdr~┌~boxh~─~boxhD~╥~boxhU~╨~boxhd~┬~boxhu~┴~boxminus~⊟~boxplus~⊞~boxtimes~⊠~boxuL~╛~boxuR~╘~boxul~┘~boxur~└~boxv~│~boxvH~╪~boxvL~╡~boxvR~╞~boxvh~┼~boxvl~┤~boxvr~├~bprime~‵~breve~˘~bscr~𝒷~bsemi~⁏~bsim~∽~bsime~⋍~bsol~\\~bsolb~⧅~bsolhsub~⟈~bullet~•~bump~≎~bumpE~⪮~bumpe~≏~bumpeq~≏~cacute~ć~capand~⩄~capbrcup~⩉~capcap~⩋~capcup~⩇~capdot~⩀~caps~∩︀~caret~⁁~caron~ˇ~ccaps~⩍~ccaron~č~ccirc~ĉ~ccups~⩌~ccupssm~⩐~cdot~ċ~cemptyv~⦲~centerdot~·~cfr~𝔠~chcy~ч~check~✓~checkmark~✓~cir~○~cirE~⧃~circeq~≗~circlearrowleft~↺~circlearrowright~↻~circledR~®~circledS~Ⓢ~circledast~⊛~circledcirc~⊚~circleddash~⊝~cire~≗~cirfnint~⨐~cirmid~⫯~cirscir~⧂~clubsuit~♣~colon~:~colone~≔~coloneq~≔~comma~,~commat~@~comp~∁~compfn~∘~complement~∁~complexes~ℂ~congdot~⩭~conint~∮~copf~𝕔~coprod~∐~copysr~℗~cross~✗~cscr~𝒸~csub~⫏~csube~⫑~csup~⫐~csupe~⫒~ctdot~⋯~cudarrl~⤸~cudarrr~⤵~cuepr~⋞~cuesc~⋟~cularr~↶~cularrp~⤽~cupbrcap~⩈~cupcap~⩆~cupcup~⩊~cupdot~⊍~cupor~⩅~cups~∪︀~curarr~↷~curarrm~⤼~curlyeqprec~⋞~curlyeqsucc~⋟~curlyvee~⋎~curlywedge~⋏~curvearrowleft~↶~curvearrowright~↷~cuvee~⋎~cuwed~⋏~cwconint~∲~cwint~∱~cylcty~⌭~dHar~⥥~daleth~ℸ~dash~‐~dashv~⊣~dbkarow~⤏~dblac~˝~dcaron~ď~dcy~д~dd~ⅆ~ddagger~‡~ddarr~⇊~ddotseq~⩷~demptyv~⦱~dfisht~⥿~dfr~𝔡~dharl~⇃~dharr~⇂~diam~⋄~diamond~⋄~diamondsuit~♦~die~¨~digamma~ϝ~disin~⋲~div~÷~divideontimes~⋇~divonx~⋇~djcy~ђ~dlcorn~⌞~dlcrop~⌍~dollar~$~dopf~𝕕~dot~˙~doteq~≐~doteqdot~≑~dotminus~∸~dotplus~∔~dotsquare~⊡~doublebarwedge~⌆~downarrow~↓~downdownarrows~⇊~downharpoonleft~⇃~downharpoonright~⇂~drbkarow~⤐~drcorn~⌟~drcrop~⌌~dscr~𝒹~dscy~ѕ~dsol~⧶~dstrok~đ~dtdot~⋱~dtri~▿~dtrif~▾~duarr~⇵~duhar~⥯~dwangle~⦦~dzcy~џ~dzigrarr~⟿~eDDot~⩷~eDot~≑~easter~⩮~ecaron~ě~ecir~≖~ecolon~≕~ecy~э~edot~ė~ee~ⅇ~efDot~≒~efr~𝔢~eg~⪚~egs~⪖~egsdot~⪘~el~⪙~elinters~⏧~ell~ℓ~els~⪕~elsdot~⪗~emacr~ē~emptyset~∅~emptyv~∅~emsp13~ ~emsp14~ 
~eng~ŋ~eogon~ę~eopf~𝕖~epar~⋕~eparsl~⧣~eplus~⩱~epsi~ε~epsiv~ϵ~eqcirc~≖~eqcolon~≕~eqsim~≂~eqslantgtr~⪖~eqslantless~⪕~equals~=~equest~≟~equivDD~⩸~eqvparsl~⧥~erDot~≓~erarr~⥱~escr~ℯ~esdot~≐~esim~≂~excl~!~expectation~ℰ~exponentiale~ⅇ~fallingdotseq~≒~fcy~ф~female~♀~ffilig~ffi~fflig~ff~ffllig~ffl~ffr~𝔣~filig~fi~fjlig~fj~flat~♭~fllig~fl~fltns~▱~fopf~𝕗~fork~⋔~forkv~⫙~fpartint~⨍~frac13~⅓~frac15~⅕~frac16~⅙~frac18~⅛~frac23~⅔~frac25~⅖~frac35~⅗~frac38~⅜~frac45~⅘~frac56~⅚~frac58~⅝~frac78~⅞~frown~⌢~fscr~𝒻~gE~≧~gEl~⪌~gacute~ǵ~gammad~ϝ~gap~⪆~gbreve~ğ~gcirc~ĝ~gcy~г~gdot~ġ~gel~⋛~geq~≥~geqq~≧~geqslant~⩾~ges~⩾~gescc~⪩~gesdot~⪀~gesdoto~⪂~gesdotol~⪄~gesl~⋛︀~gesles~⪔~gfr~𝔤~gg~≫~ggg~⋙~gimel~ℷ~gjcy~ѓ~gl~≷~glE~⪒~gla~⪥~glj~⪤~gnE~≩~gnap~⪊~gnapprox~⪊~gne~⪈~gneq~⪈~gneqq~≩~gnsim~⋧~gopf~𝕘~grave~`~gscr~ℊ~gsim~≳~gsime~⪎~gsiml~⪐~gtcc~⪧~gtcir~⩺~gtdot~⋗~gtlPar~⦕~gtquest~⩼~gtrapprox~⪆~gtrarr~⥸~gtrdot~⋗~gtreqless~⋛~gtreqqless~⪌~gtrless~≷~gtrsim~≳~gvertneqq~≩︀~gvnE~≩︀~hairsp~ ~half~½~hamilt~ℋ~hardcy~ъ~harrcir~⥈~harrw~↭~hbar~ℏ~hcirc~ĥ~heartsuit~♥~hercon~⊹~hfr~𝔥~hksearow~⤥~hkswarow~⤦~hoarr~⇿~homtht~∻~hookleftarrow~↩~hookrightarrow~↪~hopf~𝕙~horbar~―~hscr~𝒽~hslash~ℏ~hstrok~ħ~hybull~⁃~hyphen~‐~ic~⁣~icy~и~iecy~е~iff~⇔~ifr~𝔦~ii~ⅈ~iiiint~⨌~iiint~∭~iinfin~⧜~iiota~℩~ijlig~ij~imacr~ī~imagline~ℐ~imagpart~ℑ~imath~ı~imof~⊷~imped~Ƶ~in~∈~incare~℅~infintie~⧝~inodot~ı~intcal~⊺~integers~ℤ~intercal~⊺~intlarhk~⨗~intprod~⨼~iocy~ё~iogon~į~iopf~𝕚~iprod~⨼~iscr~𝒾~isinE~⋹~isindot~⋵~isins~⋴~isinsv~⋳~isinv~∈~it~⁢~itilde~ĩ~iukcy~і~jcirc~ĵ~jcy~й~jfr~𝔧~jmath~ȷ~jopf~𝕛~jscr~𝒿~jsercy~ј~jukcy~є~kappav~ϰ~kcedil~ķ~kcy~к~kfr~𝔨~kgreen~ĸ~khcy~х~kjcy~ќ~kopf~𝕜~kscr~𝓀~lAarr~⇚~lAtail~⤛~lBarr~⤎~lE~≦~lEg~⪋~lHar~⥢~lacute~ĺ~laemptyv~⦴~lagran~ℒ~langd~⦑~langle~⟨~lap~⪅~larrb~⇤~larrbfs~⤟~larrfs~⤝~larrhk~↩~larrlp~↫~larrpl~⤹~larrsim~⥳~larrtl~↢~lat~⪫~latail~⤙~late~⪭~lates~⪭︀~lbarr~⤌~lbbrk~❲~lbrace~{~lbrack~[~lbrke~⦋~lbrksld~⦏~lbrkslu~⦍~lcaron~ľ~lcedil~ļ~lcub~{~lcy~л~ldca~⤶~ldquor~„~ldrdhar~⥧~ldrushar~⥋~ldsh~↲~leftarrow~←~leftarrowtail~↢~leftharpoondown~↽~leftharpoonup~↼~leftleftarrows~⇇~leftrightarrow~↔~leftrightarrows~⇆~leftrightharpoons~⇋~leftrightsquigarrow~↭~leftthreetimes~⋋~leg~⋚~leq~≤~leqq~≦~leqslant~⩽~les~⩽~lescc~⪨~lesdot~⩿~lesdoto~⪁~lesdotor~⪃~lesg~⋚︀~lesges~⪓~lessapprox~⪅~lessdot~⋖~lesseqgtr~⋚~lesseqqgtr~⪋~lessgtr~≶~lesssim~≲~lfisht~⥼~lfr~𝔩~lg~≶~lgE~⪑~lhard~↽~lharu~↼~lharul~⥪~lhblk~▄~ljcy~љ~ll~≪~llarr~⇇~llcorner~⌞~llhard~⥫~lltri~◺~lmidot~ŀ~lmoust~⎰~lmoustache~⎰~lnE~≨~lnap~⪉~lnapprox~⪉~lne~⪇~lneq~⪇~lneqq~≨~lnsim~⋦~loang~⟬~loarr~⇽~lobrk~⟦~longleftarrow~⟵~longleftrightarrow~⟷~longmapsto~⟼~longrightarrow~⟶~looparrowleft~↫~looparrowright~↬~lopar~⦅~lopf~𝕝~loplus~⨭~lotimes~⨴~lowbar~_~lozenge~◊~lozf~⧫~lpar~(~lparlt~⦓~lrarr~⇆~lrcorner~⌟~lrhar~⇋~lrhard~⥭~lrtri~⊿~lscr~𝓁~lsh~↰~lsim~≲~lsime~⪍~lsimg~⪏~lsqb~[~lsquor~‚~lstrok~ł~ltcc~⪦~ltcir~⩹~ltdot~⋖~lthree~⋋~ltimes~⋉~ltlarr~⥶~ltquest~⩻~ltrPar~⦖~ltri~◃~ltrie~⊴~ltrif~◂~lurdshar~⥊~luruhar~⥦~lvertneqq~≨︀~lvnE~≨︀~mDDot~∺~male~♂~malt~✠~maltese~✠~map~↦~mapsto~↦~mapstodown~↧~mapstoleft~↤~mapstoup~↥~marker~▮~mcomma~⨩~mcy~м~measuredangle~∡~mfr~𝔪~mho~℧~mid~∣~midast~*~midcir~⫰~minusb~⊟~minusd~∸~minusdu~⨪~mlcp~⫛~mldr~…~mnplus~∓~models~⊧~mopf~𝕞~mp~∓~mscr~𝓂~mstpos~∾~multimap~⊸~mumap~⊸~nGg~⋙̸~nGt~≫⃒~nGtv~≫̸~nLeftarrow~⇍~nLeftrightarrow~⇎~nLl~⋘̸~nLt~≪⃒~nLtv~≪̸~nRightarrow~⇏~nVDash~⊯~nVdash~⊮~nacute~ń~nang~∠⃒~nap~≉~napE~⩰̸~napid~≋̸~napos~ʼn~napprox~≉~natur~♮~natural~♮~naturals~ℕ~nbump~≎̸~nbumpe~≏̸~ncap~⩃~ncaron~ň~ncedil~ņ~ncong~≇~ncongdot~⩭̸~ncup~⩂~ncy~н~neArr~⇗~nearhk~⤤~nearr~↗~nearrow~↗~nedot~≐̸~nequiv~≢~nesear~⤨~nesim~≂̸~nexist~∄~nexists~∄~nfr~𝔫~ngE~≧̸~nge~≱
~ngeq~≱~ngeqq~≧̸~ngeqslant~⩾̸~nges~⩾̸~ngsim~≵~ngt~≯~ngtr~≯~nhArr~⇎~nharr~↮~nhpar~⫲~nis~⋼~nisd~⋺~niv~∋~njcy~њ~nlArr~⇍~nlE~≦̸~nlarr~↚~nldr~‥~nle~≰~nleftarrow~↚~nleftrightarrow~↮~nleq~≰~nleqq~≦̸~nleqslant~⩽̸~nles~⩽̸~nless~≮~nlsim~≴~nlt~≮~nltri~⋪~nltrie~⋬~nmid~∤~nopf~𝕟~notinE~⋹̸~notindot~⋵̸~notinva~∉~notinvb~⋷~notinvc~⋶~notni~∌~notniva~∌~notnivb~⋾~notnivc~⋽~npar~∦~nparallel~∦~nparsl~⫽⃥~npart~∂̸~npolint~⨔~npr~⊀~nprcue~⋠~npre~⪯̸~nprec~⊀~npreceq~⪯̸~nrArr~⇏~nrarr~↛~nrarrc~⤳̸~nrarrw~↝̸~nrightarrow~↛~nrtri~⋫~nrtrie~⋭~nsc~⊁~nsccue~⋡~nsce~⪰̸~nscr~𝓃~nshortmid~∤~nshortparallel~∦~nsim~≁~nsime~≄~nsimeq~≄~nsmid~∤~nspar~∦~nsqsube~⋢~nsqsupe~⋣~nsubE~⫅̸~nsube~⊈~nsubset~⊂⃒~nsubseteq~⊈~nsubseteqq~⫅̸~nsucc~⊁~nsucceq~⪰̸~nsup~⊅~nsupE~⫆̸~nsupe~⊉~nsupset~⊃⃒~nsupseteq~⊉~nsupseteqq~⫆̸~ntgl~≹~ntlg~≸~ntriangleleft~⋪~ntrianglelefteq~⋬~ntriangleright~⋫~ntrianglerighteq~⋭~num~#~numero~№~numsp~ ~nvDash~⊭~nvHarr~⤄~nvap~≍⃒~nvdash~⊬~nvge~≥⃒~nvgt~>⃒~nvinfin~⧞~nvlArr~⤂~nvle~≤⃒~nvlt~<⃒~nvltrie~⊴⃒~nvrArr~⤃~nvrtrie~⊵⃒~nvsim~∼⃒~nwArr~⇖~nwarhk~⤣~nwarr~↖~nwarrow~↖~nwnear~⤧~oS~Ⓢ~oast~⊛~ocir~⊚~ocy~о~odash~⊝~odblac~ő~odiv~⨸~odot~⊙~odsold~⦼~ofcir~⦿~ofr~𝔬~ogon~˛~ogt~⧁~ohbar~⦵~ohm~Ω~oint~∮~olarr~↺~olcir~⦾~olcross~⦻~olt~⧀~omacr~ō~omid~⦶~ominus~⊖~oopf~𝕠~opar~⦷~operp~⦹~orarr~↻~ord~⩝~order~ℴ~orderof~ℴ~origof~⊶~oror~⩖~orslope~⩗~orv~⩛~oscr~ℴ~osol~⊘~otimesas~⨶~ovbar~⌽~par~∥~parallel~∥~parsim~⫳~parsl~⫽~pcy~п~percnt~%~period~.~pertenk~‱~pfr~𝔭~phiv~ϕ~phmmat~ℳ~phone~☎~pitchfork~⋔~planck~ℏ~planckh~ℎ~plankv~ℏ~plus~+~plusacir~⨣~plusb~⊞~pluscir~⨢~plusdo~∔~plusdu~⨥~pluse~⩲~plussim~⨦~plustwo~⨧~pm~±~pointint~⨕~popf~𝕡~pr~≺~prE~⪳~prap~⪷~prcue~≼~pre~⪯~prec~≺~precapprox~⪷~preccurlyeq~≼~preceq~⪯~precnapprox~⪹~precneqq~⪵~precnsim~⋨~precsim~≾~primes~ℙ~prnE~⪵~prnap~⪹~prnsim~⋨~profalar~⌮~profline~⌒~profsurf~⌓~propto~∝~prsim~≾~prurel~⊰~pscr~𝓅~puncsp~ 
~qfr~𝔮~qint~⨌~qopf~𝕢~qprime~⁗~qscr~𝓆~quaternions~ℍ~quatint~⨖~quest~?~questeq~≟~rAarr~⇛~rAtail~⤜~rBarr~⤏~rHar~⥤~race~∽̱~racute~ŕ~raemptyv~⦳~rangd~⦒~range~⦥~rangle~⟩~rarrap~⥵~rarrb~⇥~rarrbfs~⤠~rarrc~⤳~rarrfs~⤞~rarrhk~↪~rarrlp~↬~rarrpl~⥅~rarrsim~⥴~rarrtl~↣~rarrw~↝~ratail~⤚~ratio~∶~rationals~ℚ~rbarr~⤍~rbbrk~❳~rbrace~}~rbrack~]~rbrke~⦌~rbrksld~⦎~rbrkslu~⦐~rcaron~ř~rcedil~ŗ~rcub~}~rcy~р~rdca~⤷~rdldhar~⥩~rdquor~”~rdsh~↳~realine~ℛ~realpart~ℜ~reals~ℝ~rect~▭~rfisht~⥽~rfr~𝔯~rhard~⇁~rharu~⇀~rharul~⥬~rhov~ϱ~rightarrow~→~rightarrowtail~↣~rightharpoondown~⇁~rightharpoonup~⇀~rightleftarrows~⇄~rightleftharpoons~⇌~rightrightarrows~⇉~rightsquigarrow~↝~rightthreetimes~⋌~ring~˚~risingdotseq~≓~rlarr~⇄~rlhar~⇌~rmoust~⎱~rmoustache~⎱~rnmid~⫮~roang~⟭~roarr~⇾~robrk~⟧~ropar~⦆~ropf~𝕣~roplus~⨮~rotimes~⨵~rpar~)~rpargt~⦔~rppolint~⨒~rrarr~⇉~rscr~𝓇~rsh~↱~rsqb~]~rsquor~’~rthree~⋌~rtimes~⋊~rtri~▹~rtrie~⊵~rtrif~▸~rtriltri~⧎~ruluhar~⥨~rx~℞~sacute~ś~sc~≻~scE~⪴~scap~⪸~sccue~≽~sce~⪰~scedil~ş~scirc~ŝ~scnE~⪶~scnap~⪺~scnsim~⋩~scpolint~⨓~scsim~≿~scy~с~sdotb~⊡~sdote~⩦~seArr~⇘~searhk~⤥~searr~↘~searrow~↘~semi~;~seswar~⤩~setminus~∖~setmn~∖~sext~✶~sfr~𝔰~sfrown~⌢~sharp~♯~shchcy~щ~shcy~ш~shortmid~∣~shortparallel~∥~sigmav~ς~simdot~⩪~sime~≃~simeq~≃~simg~⪞~simgE~⪠~siml~⪝~simlE~⪟~simne~≆~simplus~⨤~simrarr~⥲~slarr~←~smallsetminus~∖~smashp~⨳~smeparsl~⧤~smid~∣~smile~⌣~smt~⪪~smte~⪬~smtes~⪬︀~softcy~ь~sol~/~solb~⧄~solbar~⌿~sopf~𝕤~spadesuit~♠~spar~∥~sqcap~⊓~sqcaps~⊓︀~sqcup~⊔~sqcups~⊔︀~sqsub~⊏~sqsube~⊑~sqsubset~⊏~sqsubseteq~⊑~sqsup~⊐~sqsupe~⊒~sqsupset~⊐~sqsupseteq~⊒~squ~□~square~□~squarf~▪~squf~▪~srarr~→~sscr~𝓈~ssetmn~∖~ssmile~⌣~sstarf~⋆~star~☆~starf~★~straightepsilon~ϵ~straightphi~ϕ~strns~¯~subE~⫅~subdot~⪽~subedot~⫃~submult~⫁~subnE~⫋~subne~⊊~subplus~⪿~subrarr~⥹~subset~⊂~subseteq~⊆~subseteqq~⫅~subsetneq~⊊~subsetneqq~⫋~subsim~⫇~subsub~⫕~subsup~⫓~succ~≻~succapprox~⪸~succcurlyeq~≽~succeq~⪰~succnapprox~⪺~succneqq~⪶~succnsim~⋩~succsim~≿~sung~♪~supE~⫆~supdot~⪾~supdsub~⫘~supedot~⫄~suphsol~⟉~suphsub~⫗~suplarr~⥻~supmult~⫂~supnE~⫌~supne~⊋~supplus~⫀~supset~⊃~supseteq~⊇~supseteqq~⫆~supsetneq~⊋~supsetneqq~⫌~supsim~⫈~supsub~⫔~supsup~⫖~swArr~⇙~swarhk~⤦~swarr~↙~swarrow~↙~swnwar~⤪~target~⌖~tbrk~⎴~tcaron~ť~tcedil~ţ~tcy~т~tdot~⃛~telrec~⌕~tfr~𝔱~therefore~∴~thetav~ϑ~thickapprox~≈~thicksim~∼~thkap~≈~thksim~∼~timesb~⊠~timesbar~⨱~timesd~⨰~tint~∭~toea~⤨~top~⊤~topbot~⌶~topcir~⫱~topf~𝕥~topfork~⫚~tosa~⤩~tprime~‴~triangle~▵~triangledown~▿~triangleleft~◃~trianglelefteq~⊴~triangleq~≜~triangleright~▹~trianglerighteq~⊵~tridot~◬~trie~≜~triminus~⨺~triplus~⨹~trisb~⧍~tritime~⨻~trpezium~⏢~tscr~𝓉~tscy~ц~tshcy~ћ~tstrok~ŧ~twixt~≬~twoheadleftarrow~↞~twoheadrightarrow~↠~uHar~⥣~ubrcy~ў~ubreve~ŭ~ucy~у~udarr~⇅~udblac~ű~udhar~⥮~ufisht~⥾~ufr~𝔲~uharl~↿~uharr~↾~uhblk~▀~ulcorn~⌜~ulcorner~⌜~ulcrop~⌏~ultri~◸~umacr~ū~uogon~ų~uopf~𝕦~uparrow~↑~updownarrow~↕~upharpoonleft~↿~upharpoonright~↾~uplus~⊎~upsi~υ~upuparrows~⇈~urcorn~⌝~urcorner~⌝~urcrop~⌎~uring~ů~urtri~◹~uscr~𝓊~utdot~⋰~utilde~ũ~utri~▵~utrif~▴~uuarr~⇈~uwangle~⦧~vArr~⇕~vBar~⫨~vBarv~⫩~vDash~⊨~vangrt~⦜~varepsilon~ϵ~varkappa~ϰ~varnothing~∅~varphi~ϕ~varpi~ϖ~varpropto~∝~varr~↕~varrho~ϱ~varsigma~ς~varsubsetneq~⊊︀~varsubsetneqq~⫋︀~varsupsetneq~⊋︀~varsupsetneqq~⫌︀~vartheta~ϑ~vartriangleleft~⊲~vartriangleright~⊳~vcy~в~vdash~⊢~vee~∨~veebar~⊻~veeeq~≚~vellip~⋮~verbar~|~vert~|~vfr~𝔳~vltri~⊲~vnsub~⊂⃒~vnsup~⊃⃒~vopf~𝕧~vprop~∝~vrtri~⊳~vscr~𝓋~vsubnE~⫋︀~vsubne~⊊︀~vsupnE~⫌︀~vsupne~⊋︀~vzigzag~⦚~wcirc~ŵ~wedbar~⩟~wedge~∧~wedgeq~≙~wfr~𝔴~wopf~𝕨~wp~℘~wr~≀~wreath~≀~wscr~𝓌~xcap~⋂~xcirc~◯~xcup~⋃~xdtri~▽~xfr~𝔵~xhArr~⟺~xharr~⟷~xlArr~⟸~xlarr~⟵~xmap~⟼~xnis~⋻~xodot~⨀~xopf~𝕩~xoplus~⨁~xot
ime~⨂~xrArr~⟹~xrarr~⟶~xscr~𝓍~xsqcup~⨆~xuplus~⨄~xutri~△~xvee~⋁~xwedge~⋀~yacy~я~ycirc~ŷ~ycy~ы~yfr~𝔶~yicy~ї~yopf~𝕪~yscr~𝓎~yucy~ю~zacute~ź~zcaron~ž~zcy~з~zdot~ż~zeetrf~ℨ~zfr~𝔷~zhcy~ж~zigrarr~⇝~zopf~𝕫~zscr~𝓏~~AMP~&~COPY~©~GT~>~LT~<~QUOT~\"~REG~®", exports.namedReferences['html4']); +//# sourceMappingURL=named-references.js.map + +/***/ }), + +/***/ 1057: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.numericUnicodeMap = void 0; +exports.numericUnicodeMap = { + 0: 65533, + 128: 8364, + 130: 8218, + 131: 402, + 132: 8222, + 133: 8230, + 134: 8224, + 135: 8225, + 136: 710, + 137: 8240, + 138: 352, + 139: 8249, + 140: 338, + 142: 381, + 145: 8216, + 146: 8217, + 147: 8220, + 148: 8221, + 149: 8226, + 150: 8211, + 151: 8212, + 152: 732, + 153: 8482, + 154: 353, + 155: 8250, + 156: 339, + 158: 382, + 159: 376 +}; +//# sourceMappingURL=numeric-unicode-map.js.map + +/***/ }), + +/***/ 9718: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.highSurrogateTo = exports.highSurrogateFrom = exports.getCodePoint = exports.fromCodePoint = void 0; +exports.fromCodePoint = String.fromCodePoint || + function (astralCodePoint) { + return String.fromCharCode(Math.floor((astralCodePoint - 0x10000) / 0x400) + 0xd800, ((astralCodePoint - 0x10000) % 0x400) + 0xdc00); + }; +// @ts-expect-error - String.prototype.codePointAt might not exist in older node versions +exports.getCodePoint = String.prototype.codePointAt + ? function (input, position) { + return input.codePointAt(position); + } + : function (input, position) { + return (input.charCodeAt(position) - 0xd800) * 0x400 + input.charCodeAt(position + 1) - 0xdc00 + 0x10000; + }; +exports.highSurrogateFrom = 0xd800; +exports.highSurrogateTo = 0xdbff; +//# sourceMappingURL=surrogate-pairs.js.map + +/***/ }), + +/***/ 1969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* eslint-disable node/no-missing-require */ + +function getPackageJSON() { + return __nccwpck_require__(2721); +} + +exports.getPackageJSON = getPackageJSON; + + /***/ }), /***/ 4012: @@ -65417,6 +107270,30 @@ module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4. 
/***/ }), +/***/ 2721: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.16.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- --sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha 
build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"<4.1.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","duplexify":"^4.1.3","fast-xml-parser":"^4.4.1","gaxios":"^6.0.2","google-auth-library":"^9.6.3","html-entities":"^2.5.2","mime":"^3.0.0","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/duplexify":"^3.6.4","@types/mime":"^3.0.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^22.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^9.0.0","form-data":"^4.0.0","gapic-tools":"^0.4.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^3.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.5.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^18.0.0","nise":"6.0.0","path-to-regexp":"6.3.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); + +/***/ }), + +/***/ 6495: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"gaxios","version":"6.7.1","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"1.1.19","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^10.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cheerio":"1.0.0-rc.10","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","linkinator":"^3.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^19.0.0","sinon":"^18.0.0","stream-browserify":"^3.0.0","tmp":"0.2.3","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9","uuid":"^9.0.1"}}'); + +/***/ }), + +/***/ 6066: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"google-auth-library","version":"9.15.1","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^17.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","cheerio":"1.0.0-rc.12","codecov":"^3.0.2","engine.io":"6.6.2","gts":"^5.0.0","is-docker":"^2.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","pdfmake":"0.2.12","puppeteer":"^21.0.0","sinon":"^18.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"jsdoc -c .jsdoc.json","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); + +/***/ }), + /***/ 1813: /***/ 
((module) => { diff --git a/dist/save/index.js b/dist/save/index.js index d288e0b..5c4b391 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -7792,7 +7792,7 @@ exports.isTokenCredential = isTokenCredential; Object.defineProperty(exports, "__esModule", ({ value: true })); -var uuid = __nccwpck_require__(3534); +var uuid = __nccwpck_require__(2048); var util = __nccwpck_require__(9023); var tslib = __nccwpck_require__(470); var xml2js = __nccwpck_require__(758); @@ -14157,652 +14157,6 @@ var __createBinding; }); -/***/ }), - -/***/ 3534: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -Object.defineProperty(exports, "v1", ({ - enumerable: true, - get: function () { - return _v.default; - } -})); -Object.defineProperty(exports, "v3", ({ - enumerable: true, - get: function () { - return _v2.default; - } -})); -Object.defineProperty(exports, "v4", ({ - enumerable: true, - get: function () { - return _v3.default; - } -})); -Object.defineProperty(exports, "v5", ({ - enumerable: true, - get: function () { - return _v4.default; - } -})); -Object.defineProperty(exports, "NIL", ({ - enumerable: true, - get: function () { - return _nil.default; - } -})); -Object.defineProperty(exports, "version", ({ - enumerable: true, - get: function () { - return _version.default; - } -})); -Object.defineProperty(exports, "validate", ({ - enumerable: true, - get: function () { - return _validate.default; - } -})); -Object.defineProperty(exports, "stringify", ({ - enumerable: true, - get: function () { - return _stringify.default; - } -})); -Object.defineProperty(exports, "parse", ({ - enumerable: true, - get: function () { - return _parse.default; - } -})); - -var _v = _interopRequireDefault(__nccwpck_require__(3417)); - -var _v2 = _interopRequireDefault(__nccwpck_require__(2855)); - -var _v3 = _interopRequireDefault(__nccwpck_require__(7974)); - -var _v4 = _interopRequireDefault(__nccwpck_require__(6165)); - -var _nil = _interopRequireDefault(__nccwpck_require__(7441)); - -var _version = _interopRequireDefault(__nccwpck_require__(5962)); - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/***/ }), - -/***/ 7370: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('md5').update(bytes).digest(); -} - -var _default = md5; -exports["default"] = _default; - -/***/ }), - -/***/ 7441: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; -exports["default"] = _default; - -/***/ }), - -/***/ 6221: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function parse(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ - - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ - - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ - - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ - - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) - - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} - -var _default = parse; -exports["default"] = _default; - -/***/ }), - -/***/ 8689: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; - -/***/ }), - -/***/ 6299: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate - -let poolPtr = rnds8Pool.length; - -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); - - poolPtr = 0; - } - - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} - -/***/ }), - -/***/ 8821: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } - - return _crypto.default.createHash('sha1').update(bytes).digest(); -} - -var _default = sha1; -exports["default"] = _default; - -/***/ }), - -/***/ 9651: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; - -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} - -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields - - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } - - return uuid; -} - -var _default = stringify; -exports["default"] = _default; - -/***/ }), - -/***/ 3417: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; - -let _clockseq; // Previous uuid creation time - - -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details - -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. 
See #189 - - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); - - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. - - - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock - - let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) - - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression - - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval - - - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested - - - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch - - msecs += 12219292800000; // `time_low` - - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` - - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` - - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version - - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) - - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` - - b[i++] = clockseq & 0xff; // `node` - - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - - return buf || (0, _stringify.default)(b); -} - -var _default = v1; -exports["default"] = _default; - -/***/ }), - -/***/ 2855: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _md = _interopRequireDefault(__nccwpck_require__(7370)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; - -/***/ }), - -/***/ 8132: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -var _parse = _interopRequireDefault(__nccwpck_require__(6221)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape - - const bytes = []; - - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - - return bytes; -} - -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; - -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); - } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); - } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - - return buf; - } - - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - - - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support - - - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} - -/***/ }), - -/***/ 7974: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(6299)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(9651)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; - - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` - - - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - - return buf; - } - - return (0, _stringify.default)(rnds); -} - -var _default = v4; -exports["default"] = _default; - -/***/ }), - -/***/ 6165: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _v = _interopRequireDefault(__nccwpck_require__(8132)); - -var _sha = _interopRequireDefault(__nccwpck_require__(8821)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; - -/***/ }), - -/***/ 1690: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _regex = _interopRequireDefault(__nccwpck_require__(8689)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} - -var _default = validate; -exports["default"] = _default; - -/***/ }), - -/***/ 5962: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(1690)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } - -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); - } - - return parseInt(uuid.substr(14, 1), 16); -} - -var _default = version; -exports["default"] = _default; - /***/ }), /***/ 5862: @@ -42639,6 +41993,619 @@ var __createBinding; }); +/***/ }), + +/***/ 9469: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = exports.paginator = exports.Paginator = void 0; +/*! + * @module common/paginator + */ +const arrify = __nccwpck_require__(6251); +const extend = __nccwpck_require__(3860); +const resource_stream_1 = __nccwpck_require__(7618); +Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); +/*! 
Developer Documentation + * + * paginator is used to auto-paginate `nextQuery` methods as well as + * streamifying them. + * + * Before: + * + * search.query('done=true', function(err, results, nextQuery) { + * search.query(nextQuery, function(err, results, nextQuery) {}); + * }); + * + * After: + * + * search.query('done=true', function(err, results) {}); + * + * Methods to extend should be written to accept callbacks and return a + * `nextQuery`. + */ +class Paginator { + /** + * Cache the original method, then overwrite it on the Class's prototype. + * + * @param {function} Class - The parent class of the methods to extend. + * @param {string|string[]} methodNames - Name(s) of the methods to extend. + */ + // tslint:disable-next-line:variable-name + extend(Class, methodNames) { + methodNames = arrify(methodNames); + methodNames.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + // map the original method to a private member + Class.prototype[methodName + '_'] = originalMethod; + // overwrite the original to auto-paginate + /* eslint-disable @typescript-eslint/no-explicit-any */ + Class.prototype[methodName] = function (...args) { + const parsedArguments = paginator.parseArguments_(args); + return paginator.run_(parsedArguments, originalMethod.bind(this)); + }; + }); + } + /** + * Wraps paginated API calls in a readable object stream. + * + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {string} methodName - Name of the method to streamify. + * @return {function} - Wrapped function. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + streamify(methodName) { + return function ( + /* eslint-disable @typescript-eslint/no-explicit-any */ + ...args) { + const parsedArguments = paginator.parseArguments_(args); + const originalMethod = this[methodName + '_'] || this[methodName]; + return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); + }; + } + /** + * Parse a pseudo-array `arguments` for a query and callback. + * + * @param {array} args - The original `arguments` pseduo-array that the original + * method received. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + parseArguments_(args) { + let query; + let autoPaginate = true; + let maxApiCalls = -1; + let maxResults = -1; + let callback; + const firstArgument = args[0]; + const lastArgument = args[args.length - 1]; + if (typeof firstArgument === 'function') { + callback = firstArgument; + } + else { + query = firstArgument; + } + if (typeof lastArgument === 'function') { + callback = lastArgument; + } + if (typeof query === 'object') { + query = extend(true, {}, query); + // Check if the user only asked for a certain amount of results. + if (query.maxResults && typeof query.maxResults === 'number') { + // `maxResults` is used API-wide. + maxResults = query.maxResults; + } + else if (typeof query.pageSize === 'number') { + // `pageSize` is Pub/Sub's `maxResults`. + maxResults = query.pageSize; + } + if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { + maxApiCalls = query.maxApiCalls; + delete query.maxApiCalls; + } + // maxResults is the user specified limit. 
+ if (maxResults !== -1 || query.autoPaginate === false) { + autoPaginate = false; + } + } + const parsedArguments = { + query: query || {}, + autoPaginate, + maxApiCalls, + maxResults, + callback, + }; + parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); + delete parsedArguments.streamOptions.autoPaginate; + delete parsedArguments.streamOptions.maxResults; + delete parsedArguments.streamOptions.pageSize; + return parsedArguments; + } + /** + * This simply checks to see if `autoPaginate` is set or not, if it's true + * then we buffer all results, otherwise simply call the original method. + * + * @param {array} parsedArguments - Parsed arguments from the original method + * call. + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + */ + run_(parsedArguments, originalMethod) { + const query = parsedArguments.query; + const callback = parsedArguments.callback; + if (!parsedArguments.autoPaginate) { + return originalMethod(query, callback); + } + const results = new Array(); + let otherArgs = []; + const promise = new Promise((resolve, reject) => { + const stream = paginator.runAsStream_(parsedArguments, originalMethod); + stream + .on('error', reject) + .on('data', (data) => results.push(data)) + .on('end', () => { + otherArgs = stream._otherArgs || []; + resolve(results); + }); + }); + if (!callback) { + return promise.then(results => [results, query, ...otherArgs]); + } + promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); + } + /** + * This method simply calls the nextQuery recursively, emitting results to a + * stream. The stream ends when `nextQuery` is null. + * + * `maxResults` will act as a cap for how many results are fetched and emitted + * to the stream. + * + * @param {object=|string=} parsedArguments.query - Query object. This is most + * commonly an object, but to make the API more simple, it can also be a + * string in some places. + * @param {function=} parsedArguments.callback - Callback function. + * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. + * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. + * @param {number} parsedArguments.maxResults - Maximum results to return. + * @param {function} originalMethod - The cached method that accepts a callback + * and returns `nextQuery` to receive more results. + * @return {stream} - Readable object stream. + */ + /* eslint-disable @typescript-eslint/no-explicit-any */ + runAsStream_(parsedArguments, originalMethod) { + return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); + } +} +exports.Paginator = Paginator; +const paginator = new Paginator(); +exports.paginator = paginator; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7618: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/*! + * Copyright 2019 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ResourceStream = void 0; +const stream_1 = __nccwpck_require__(2203); +class ResourceStream extends stream_1.Transform { + constructor(args, requestFn) { + const options = Object.assign({ objectMode: true }, args.streamOptions); + super(options); + this._ended = false; + this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; + this._nextQuery = args.query; + this._reading = false; + this._requestFn = requestFn; + this._requestsMade = 0; + this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; + this._otherArgs = []; + } + /* eslint-disable @typescript-eslint/no-explicit-any */ + end(...args) { + this._ended = true; + return super.end(...args); + } + _read() { + if (this._reading) { + return; + } + this._reading = true; + // Wrap in a try/catch to catch input linting errors, e.g. + // an invalid BigQuery query. These errors are thrown in an + // async fashion, which makes them un-catchable by the user. + try { + this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { + if (err) { + this.destroy(err); + return; + } + this._otherArgs = otherArgs; + this._nextQuery = nextQuery; + if (this._resultsToSend !== Infinity) { + results = results.splice(0, this._resultsToSend); + this._resultsToSend -= results.length; + } + let more = true; + for (const result of results) { + if (this._ended) { + break; + } + more = this.push(result); + } + const isFinished = !this._nextQuery || this._resultsToSend < 1; + const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; + if (isFinished || madeMaxCalls) { + this.end(); + } + if (more && !this._ended) { + setImmediate(() => this._read()); + } + this._reading = false; + }); + } + catch (e) { + this.destroy(e); + } + } +} +exports.ResourceStream = ResourceStream; +//# sourceMappingURL=resource-stream.js.map + +/***/ }), + +/***/ 7635: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; +const stream_1 = __nccwpck_require__(2203); +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/** + * Populate the `{{projectId}}` placeholder. + * + * @throws {Error} If a projectId is required, but one is not provided. 
+ * + * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. + * @param {string} projectId - A projectId. If not provided + * @return {*} - The original argument with all placeholders populated. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function replaceProjectIdToken(value, projectId) { + if (Array.isArray(value)) { + value = value.map(v => replaceProjectIdToken(v, projectId)); + } + if (value !== null && + typeof value === 'object' && + !(value instanceof Buffer) && + !(value instanceof stream_1.Stream) && + typeof value.hasOwnProperty === 'function') { + for (const opt in value) { + // eslint-disable-next-line no-prototype-builtins + if (value.hasOwnProperty(opt)) { + value[opt] = replaceProjectIdToken(value[opt], projectId); + } + } + } + if (typeof value === 'string' && + value.indexOf('{{projectId}}') > -1) { + if (!projectId || projectId === '{{projectId}}') { + throw new MissingProjectIdError(); + } + value = value.replace(/{{projectId}}/g, projectId); + } + return value; +} +exports.replaceProjectIdToken = replaceProjectIdToken; +/** + * Custom error type for missing project ID errors. + */ +class MissingProjectIdError extends Error { + constructor() { + super(...arguments); + this.message = `Sorry, we cannot connect to Cloud Services without a project + ID. You may specify one with an environment variable named + "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); + } +} +exports.MissingProjectIdError = MissingProjectIdError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 206: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/* eslint-disable prefer-rest-params */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; +/** + * Wraps a callback style function to conditionally return a promise. + * + * @param {function} originalMethod - The method to promisify. + * @param {object=} options - Promise options. + * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. + * @return {function} wrapped + */ +function promisify(originalMethod, options) { + if (originalMethod.promisified_) { + return originalMethod; + } + options = options || {}; + const slice = Array.prototype.slice; + // tslint:disable-next-line:no-any + const wrapper = function () { + let last; + for (last = arguments.length - 1; last >= 0; last--) { + const arg = arguments[last]; + if (typeof arg === 'undefined') { + continue; // skip trailing undefined. + } + if (typeof arg !== 'function') { + break; // non-callback last argument found. + } + return originalMethod.apply(this, arguments); + } + // peel trailing undefined. + const args = slice.call(arguments, 0, last + 1); + // tslint:disable-next-line:variable-name + let PromiseCtor = Promise; + // Because dedupe will likely create a single install of + // @google-cloud/common to be shared amongst all modules, we need to + // localize it at the Service level. 
+ if (this && this.Promise) { + PromiseCtor = this.Promise; + } + return new PromiseCtor((resolve, reject) => { + // tslint:disable-next-line:no-any + args.push((...args) => { + const callbackArgs = slice.call(args); + const err = callbackArgs.shift(); + if (err) { + return reject(err); + } + if (options.singular && callbackArgs.length === 1) { + resolve(callbackArgs[0]); + } + else { + resolve(callbackArgs); + } + }); + originalMethod.apply(this, args); + }); + }; + wrapper.promisified_ = true; + return wrapper; +} +exports.promisify = promisify; +/** + * Promisifies certain Class methods. This will not promisify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +// tslint:disable-next-line:variable-name +function promisifyAll(Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? + ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.promisified_) { + Class.prototype[methodName] = exports.promisify(originalMethod, options); + } + }); +} +exports.promisifyAll = promisifyAll; +/** + * Wraps a promisy type function to conditionally call a callback function. + * + * @param {function} originalMethod - The method to callbackify. + * @param {object=} options - Callback options. + * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. + * @return {function} wrapped + */ +function callbackify(originalMethod) { + if (originalMethod.callbackified_) { + return originalMethod; + } + // tslint:disable-next-line:no-any + const wrapper = function () { + if (typeof arguments[arguments.length - 1] !== 'function') { + return originalMethod.apply(this, arguments); + } + const cb = Array.prototype.pop.call(arguments); + originalMethod.apply(this, arguments).then( + // tslint:disable-next-line:no-any + (res) => { + res = Array.isArray(res) ? res : [res]; + cb(null, ...res); + }, (err) => cb(err)); + }; + wrapper.callbackified_ = true; + return wrapper; +} +exports.callbackify = callbackify; +/** + * Callbackifies certain Class methods. This will not callbackify private or + * streaming methods. + * + * @param {module:common/service} Class - Service class. + * @param {object=} options - Configuration object. + */ +function callbackifyAll( +// tslint:disable-next-line:variable-name +Class, options) { + const exclude = (options && options.exclude) || []; + const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); + const methods = ownPropertyNames.filter(methodName => { + // clang-format off + return (!exclude.includes(methodName) && + typeof Class.prototype[methodName] === 'function' && // is it a function? + !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
+ ); + // clang-format on + }); + methods.forEach(methodName => { + const originalMethod = Class.prototype[methodName]; + if (!originalMethod.callbackified_) { + Class.prototype[methodName] = exports.callbackify(originalMethod); + } + }); +} +exports.callbackifyAll = callbackifyAll; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 9977: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const Queue = __nccwpck_require__(538); + +const pLimit = concurrency => { + if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { + throw new TypeError('Expected `concurrency` to be a number from 1 and up'); + } + + const queue = new Queue(); + let activeCount = 0; + + const next = () => { + activeCount--; + + if (queue.size > 0) { + queue.dequeue()(); + } + }; + + const run = async (fn, resolve, ...args) => { + activeCount++; + + const result = (async () => fn(...args))(); + + resolve(result); + + try { + await result; + } catch {} + + next(); + }; + + const enqueue = (fn, resolve, ...args) => { + queue.enqueue(run.bind(null, fn, resolve, ...args)); + + (async () => { + // This function needs to wait until the next microtask before comparing + // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously + // when the run function is dequeued and called. The comparison in the if-statement + // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. + await Promise.resolve(); + + if (activeCount < concurrency && queue.size > 0) { + queue.dequeue()(); + } + })(); + }; + + const generator = (fn, ...args) => new Promise(resolve => { + enqueue(fn, resolve, ...args); + }); + + Object.defineProperties(generator, { + activeCount: { + get: () => activeCount + }, + pendingCount: { + get: () => queue.size + }, + clearQueue: { + value: () => { + queue.clear(); + } + } + }); + + return generator; +}; + +module.exports = pLimit; + + /***/ }), /***/ 9750: @@ -42751,7 +42718,7 @@ exports.ContextAPI = ContextAPI; */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DiagAPI = void 0; -const ComponentLogger_1 = __nccwpck_require__(7723); +const ComponentLogger_1 = __nccwpck_require__(104); const logLevelLogger_1 = __nccwpck_require__(3514); const types_1 = __nccwpck_require__(2573); const global_utils_1 = __nccwpck_require__(9923); @@ -43473,7 +43440,7 @@ exports.diag = diag_1.DiagAPI.instance(); /***/ }), -/***/ 7723: +/***/ 104: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -50282,6 +50249,531 @@ class ReflectionTypeCheck { exports.ReflectionTypeCheck = ReflectionTypeCheck; +/***/ }), + +/***/ 8662: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function once(emitter, name, { signal } = {}) { + return new Promise((resolve, reject) => { + function cleanup() { + signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + } + function onEvent(...args) { + cleanup(); + resolve(args); + } + function onError(err) { + cleanup(); + reject(err); + } + signal === null || signal === void 0 ? 
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports["default"] = once; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7413: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var eventTargetShim = __nccwpck_require__(6577); + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports["default"] = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map + + +/***/ }), + +/***/ 5183: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.req = exports.json = exports.toBuffer = void 0; +const http = __importStar(__nccwpck_require__(8611)); +const https = __importStar(__nccwpck_require__(5692)); +async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); +} +exports.toBuffer = toBuffer; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function json(stream) { + const buf = await toBuffer(stream); + const str = buf.toString('utf8'); + try { + return JSON.parse(str); + } + catch (_err) { + const err = _err; + err.message += ` (input: ${str})`; + throw err; + } +} +exports.json = json; +function req(url, opts = {}) { + const href = typeof url === 'string' ? url : url.href; + const req = (href.startsWith('https:') ? https : http).request(url, opts); + const promise = new Promise((resolve, reject) => { + req + .once('response', resolve) + .once('error', reject) + .end(); + }); + req.then = promise.then.bind(promise); + return req; +} +exports.req = req; +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 8894: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Agent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const http = __importStar(__nccwpck_require__(8611)); +const https_1 = __nccwpck_require__(5692); +__exportStar(__nccwpck_require__(5183), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. 
+ if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === 'boolean' + ? options.secureEndpoint + : this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 
'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6251: +/***/ ((module) => { + +"use strict"; + + +const arrify = value => { + if (value === null || value === undefined) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + if (typeof value === 'string') { + return [value]; + } + + if (typeof value[Symbol.iterator] === 'function') { + return [...value]; + } + + return [value]; +}; + +module.exports = arrify; + + +/***/ }), + +/***/ 5195: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Packages +var retrier = __nccwpck_require__(5546); + +function retry(fn, opts) { + function run(resolve, reject) { + var options = opts || {}; + var op; + + // Default `randomize` to true + if (!('randomize' in options)) { + options.randomize = true; + } + + op = retrier.operation(options); + + // We allow the user to abort retrying + // this makes sense in the cases where + // knowledge is obtained that retrying + // would be futile (e.g.: auth errors) + + function bail(err) { + reject(err || new Error('Aborted')); + } + + function onError(err, num) { + if (err.bail) { + bail(err); + return; + } + + if (!op.retry(err)) { + reject(op.mainError()); + } else if (options.onRetry) { + options.onRetry(err, num); + } + } + + function runAttempt(num) { + var val; + + try { + val = fn(bail, num); + } catch (err) { + onError(err, num); + return; + } + + Promise.resolve(val) + .then(resolve) + .catch(function catchIt(err) { + onError(err, num); + }); + } + + op.attempt(runAttempt); + } + + return new Promise(run); +} + +module.exports = retry; + + /***/ }), /***/ 1324: @@ -50793,6 +51285,3093 @@ function range(a, b, str) { } +/***/ }), + +/***/ 8793: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +exports.byteLength = byteLength +exports.toByteArray = toByteArray +exports.fromByteArray = fromByteArray + +var lookup = [] +var revLookup = [] +var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array + +var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' +for (var i = 0, len = code.length; i < len; ++i) { + lookup[i] = code[i] + revLookup[code.charCodeAt(i)] = i +} + +// Support decoding URL-safe base64 strings, as Node.js does. +// See: https://en.wikipedia.org/wiki/Base64#URL_applications +revLookup['-'.charCodeAt(0)] = 62 +revLookup['_'.charCodeAt(0)] = 63 + +function getLens (b64) { + var len = b64.length + + if (len % 4 > 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 
0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} + + +/***/ }), + +/***/ 1259: +/***/ (function(module) { + +;(function (globalObject) { + 'use strict'; + +/* + * bignumber.js v9.2.1 + * A JavaScript library for arbitrary-precision arithmetic. + * https://github.com/MikeMcl/bignumber.js + * Copyright (c) 2025 Michael Mclaughlin + * MIT Licensed. 
+ * + * BigNumber.prototype methods | BigNumber methods + * | + * absoluteValue abs | clone + * comparedTo | config set + * decimalPlaces dp | DECIMAL_PLACES + * dividedBy div | ROUNDING_MODE + * dividedToIntegerBy idiv | EXPONENTIAL_AT + * exponentiatedBy pow | RANGE + * integerValue | CRYPTO + * isEqualTo eq | MODULO_MODE + * isFinite | POW_PRECISION + * isGreaterThan gt | FORMAT + * isGreaterThanOrEqualTo gte | ALPHABET + * isInteger | isBigNumber + * isLessThan lt | maximum max + * isLessThanOrEqualTo lte | minimum min + * isNaN | random + * isNegative | sum + * isPositive | + * isZero | + * minus | + * modulo mod | + * multipliedBy times | + * negated | + * plus | + * precision sd | + * shiftedBy | + * squareRoot sqrt | + * toExponential | + * toFixed | + * toFormat | + * toFraction | + * toJSON | + * toNumber | + * toPrecision | + * toString | + * valueOf | + * + */ + + + var BigNumber, + isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, + mathceil = Math.ceil, + mathfloor = Math.floor, + + bignumberError = '[BigNumber Error] ', + tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', + + BASE = 1e14, + LOG_BASE = 14, + MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 + // MAX_INT32 = 0x7fffffff, // 2^31 - 1 + POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], + SQRT_BASE = 1e7, + + // EDITABLE + // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and + // the arguments to toExponential, toFixed, toFormat, and toPrecision. + MAX = 1E9; // 0 to MAX_INT32 + + + /* + * Create and return a BigNumber constructor. + */ + function clone(configObject) { + var div, convertBase, parseNumeric, + P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, + ONE = new BigNumber(1), + + + //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- + + + // The default values below must be integers within the inclusive ranges stated. + // The values can also be changed at run-time using BigNumber.set. + + // The maximum number of decimal places for operations involving division. + DECIMAL_PLACES = 20, // 0 to MAX + + // The rounding mode used when rounding to the above decimal places, and when using + // toExponential, toFixed, toFormat and toPrecision, and round (default value). + // UP 0 Away from zero. + // DOWN 1 Towards zero. + // CEIL 2 Towards +Infinity. + // FLOOR 3 Towards -Infinity. + // HALF_UP 4 Towards nearest neighbour. If equidistant, up. + // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. + // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. + // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. + // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. + ROUNDING_MODE = 4, // 0 to 8 + + // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] + + // The exponent value at and beneath which toString returns exponential notation. + // Number type: -7 + TO_EXP_NEG = -7, // 0 to -MAX + + // The exponent value at and above which toString returns exponential notation. + // Number type: 21 + TO_EXP_POS = 21, // 0 to MAX + + // RANGE : [MIN_EXP, MAX_EXP] + + // The minimum exponent value, beneath which underflow to zero occurs. + // Number type: -324 (5e-324) + MIN_EXP = -1e7, // -1 to -MAX + + // The maximum exponent value, above which overflow to Infinity occurs. + // Number type: 308 (1.7976931348623157e+308) + // For MAX_EXP > 1e7, e.g. 
new BigNumber('1e100000000').plus(1) may be slow. + MAX_EXP = 1e7, // 1 to MAX + + // Whether to use cryptographically-secure random number generation, if available. + CRYPTO = false, // true or false + + // The modulo mode used when calculating the modulus: a mod n. + // The quotient (q = a / n) is calculated according to the corresponding rounding mode. + // The remainder (r) is calculated as: r = a - n * q. + // + // UP 0 The remainder is positive if the dividend is negative, else is negative. + // DOWN 1 The remainder has the same sign as the dividend. + // This modulo mode is commonly known as 'truncated division' and is + // equivalent to (a % n) in JavaScript. + // FLOOR 3 The remainder has the same sign as the divisor (Python %). + // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. + // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). + // The remainder is always positive. + // + // The truncated division, floored division, Euclidian division and IEEE 754 remainder + // modes are commonly used for the modulus operation. + // Although the other rounding modes can also be used, they may not give useful results. + MODULO_MODE = 1, // 0 to 9 + + // The maximum number of significant digits of the result of the exponentiatedBy operation. + // If POW_PRECISION is 0, there will be unlimited significant digits. + POW_PRECISION = 0, // 0 to MAX + + // The format specification used by the BigNumber.prototype.toFormat method. + FORMAT = { + prefix: '', + groupSize: 3, + secondaryGroupSize: 0, + groupSeparator: ',', + decimalSeparator: '.', + fractionGroupSize: 0, + fractionGroupSeparator: '\xA0', // non-breaking space + suffix: '' + }, + + // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', + // '-', '.', whitespace, or repeated character. + // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' + ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', + alphabetHasNormalDecimalDigits = true; + + + //------------------------------------------------------------------------------------------ + + + // CONSTRUCTOR + + + /* + * The BigNumber constructor and exported function. + * Create and return a new instance of a BigNumber object. + * + * v {number|string|BigNumber} A numeric value. + * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. + */ + function BigNumber(v, b) { + var alphabet, c, caseChanged, e, i, isNum, len, str, + x = this; + + // Enable constructor call without `new`. + if (!(x instanceof BigNumber)) return new BigNumber(v, b); + + if (b == null) { + + if (v && v._isBigNumber === true) { + x.s = v.s; + + if (!v.c || v.e > MAX_EXP) { + x.c = x.e = null; + } else if (v.e < MIN_EXP) { + x.c = [x.e = 0]; + } else { + x.e = v.e; + x.c = v.c.slice(); + } + + return; + } + + if ((isNum = typeof v == 'number') && v * 0 == 0) { + + // Use `1 / n` to handle minus zero also. + x.s = 1 / v < 0 ? (v = -v, -1) : 1; + + // Fast path for integers, where n < 2147483648 (2**31). + if (v === ~~v) { + for (e = 0, i = v; i >= 10; i /= 10, e++); + + if (e > MAX_EXP) { + x.c = x.e = null; + } else { + x.e = e; + x.c = [v]; + } + + return; + } + + str = String(v); + } else { + + if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); + + x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; + } + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + + // Exponential form? + if ((i = str.search(/e/i)) > 0) { + + // Determine exponent. 
+ if (e < 0) e = i; + e += +str.slice(i + 1); + str = str.substring(0, i); + } else if (e < 0) { + + // Integer. + e = str.length; + } + + } else { + + // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + intCheck(b, 2, ALPHABET.length, 'Base'); + + // Allow exponential notation to be used with base 10 argument, while + // also rounding to DECIMAL_PLACES as with other bases. + if (b == 10 && alphabetHasNormalDecimalDigits) { + x = new BigNumber(v); + return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); + } + + str = String(v); + + if (isNum = typeof v == 'number') { + + // Avoid potential interpretation of Infinity and NaN as base 44+ values. + if (v * 0 != 0) return parseNumeric(x, str, isNum, b); + + x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { + throw Error + (tooManyDigits + v); + } + } else { + x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; + } + + alphabet = ALPHABET.slice(0, b); + e = i = 0; + + // Check that str is a valid base b number. + // Don't use RegExp, so alphabet can contain special characters. + for (len = str.length; i < len; i++) { + if (alphabet.indexOf(c = str.charAt(i)) < 0) { + if (c == '.') { + + // If '.' is not the first character and it has not be found before. + if (i > e) { + e = len; + continue; + } + } else if (!caseChanged) { + + // Allow e.g. hexadecimal 'FF' as well as 'ff'. + if (str == str.toUpperCase() && (str = str.toLowerCase()) || + str == str.toLowerCase() && (str = str.toUpperCase())) { + caseChanged = true; + i = -1; + e = 0; + continue; + } + } + + return parseNumeric(x, String(v), isNum, b); + } + } + + // Prevent later check for length on converted number. + isNum = false; + str = convertBase(str, b, 10, x.s); + + // Decimal point? + if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); + else e = str.length; + } + + // Determine leading zeros. + for (i = 0; str.charCodeAt(i) === 48; i++); + + // Determine trailing zeros. + for (len = str.length; str.charCodeAt(--len) === 48;); + + if (str = str.slice(i, ++len)) { + len -= i; + + // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' + if (isNum && BigNumber.DEBUG && + len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { + throw Error + (tooManyDigits + (x.s * v)); + } + + // Overflow? + if ((e = e - i - 1) > MAX_EXP) { + + // Infinity. + x.c = x.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + x.c = [x.e = 0]; + } else { + x.e = e; + x.c = []; + + // Transform base + + // e is the base 10 exponent. + // i is where to slice str to get the first element of the coefficient array. + i = (e + 1) % LOG_BASE; + if (e < 0) i += LOG_BASE; // i < 1 + + if (i < len) { + if (i) x.c.push(+str.slice(0, i)); + + for (len -= LOG_BASE; i < len;) { + x.c.push(+str.slice(i, i += LOG_BASE)); + } + + i = LOG_BASE - (str = str.slice(i)).length; + } else { + i -= len; + } + + for (; i--; str += '0'); + x.c.push(+str); + } + } else { + + // Zero. 
+ x.c = [x.e = 0]; + } + } + + + // CONSTRUCTOR PROPERTIES + + + BigNumber.clone = clone; + + BigNumber.ROUND_UP = 0; + BigNumber.ROUND_DOWN = 1; + BigNumber.ROUND_CEIL = 2; + BigNumber.ROUND_FLOOR = 3; + BigNumber.ROUND_HALF_UP = 4; + BigNumber.ROUND_HALF_DOWN = 5; + BigNumber.ROUND_HALF_EVEN = 6; + BigNumber.ROUND_HALF_CEIL = 7; + BigNumber.ROUND_HALF_FLOOR = 8; + BigNumber.EUCLID = 9; + + + /* + * Configure infrequently-changing library-wide settings. + * + * Accept an object with the following optional properties (if the value of a property is + * a number, it must be an integer within the inclusive range stated): + * + * DECIMAL_PLACES {number} 0 to MAX + * ROUNDING_MODE {number} 0 to 8 + * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] + * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] + * CRYPTO {boolean} true or false + * MODULO_MODE {number} 0 to 9 + * POW_PRECISION {number} 0 to MAX + * ALPHABET {string} A string of two or more unique characters which does + * not contain '.'. + * FORMAT {object} An object with some of the following properties: + * prefix {string} + * groupSize {number} + * secondaryGroupSize {number} + * groupSeparator {string} + * decimalSeparator {string} + * fractionGroupSize {number} + * fractionGroupSeparator {string} + * suffix {string} + * + * (The values assigned to the above FORMAT object properties are not checked for validity.) + * + * E.g. + * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) + * + * Ignore properties/parameters set to null or undefined, except for ALPHABET. + * + * Return an object with the properties current values. + */ + BigNumber.config = BigNumber.set = function (obj) { + var p, v; + + if (obj != null) { + + if (typeof obj == 'object') { + + // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + DECIMAL_PLACES = v; + } + + // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. + // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { + v = obj[p]; + intCheck(v, 0, 8, p); + ROUNDING_MODE = v; + } + + // EXPONENTIAL_AT {number|number[]} + // Integer, -MAX to MAX inclusive or + // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. + // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, 0, p); + intCheck(v[1], 0, MAX, p); + TO_EXP_NEG = v[0]; + TO_EXP_POS = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); + } + } + + // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or + // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. + // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' + if (obj.hasOwnProperty(p = 'RANGE')) { + v = obj[p]; + if (v && v.pop) { + intCheck(v[0], -MAX, -1, p); + intCheck(v[1], 1, MAX, p); + MIN_EXP = v[0]; + MAX_EXP = v[1]; + } else { + intCheck(v, -MAX, MAX, p); + if (v) { + MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); + } else { + throw Error + (bignumberError + p + ' cannot be zero: ' + v); + } + } + } + + // CRYPTO {boolean} true or false. 
+ // '[BigNumber Error] CRYPTO not true or false: {v}' + // '[BigNumber Error] crypto unavailable' + if (obj.hasOwnProperty(p = 'CRYPTO')) { + v = obj[p]; + if (v === !!v) { + if (v) { + if (typeof crypto != 'undefined' && crypto && + (crypto.getRandomValues || crypto.randomBytes)) { + CRYPTO = v; + } else { + CRYPTO = !v; + throw Error + (bignumberError + 'crypto unavailable'); + } + } else { + CRYPTO = v; + } + } else { + throw Error + (bignumberError + p + ' not true or false: ' + v); + } + } + + // MODULO_MODE {number} Integer, 0 to 9 inclusive. + // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'MODULO_MODE')) { + v = obj[p]; + intCheck(v, 0, 9, p); + MODULO_MODE = v; + } + + // POW_PRECISION {number} Integer, 0 to MAX inclusive. + // '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' + if (obj.hasOwnProperty(p = 'POW_PRECISION')) { + v = obj[p]; + intCheck(v, 0, MAX, p); + POW_PRECISION = v; + } + + // FORMAT {object} + // '[BigNumber Error] FORMAT not an object: {v}' + if (obj.hasOwnProperty(p = 'FORMAT')) { + v = obj[p]; + if (typeof v == 'object') FORMAT = v; + else throw Error + (bignumberError + p + ' not an object: ' + v); + } + + // ALPHABET {string} + // '[BigNumber Error] ALPHABET invalid: {v}' + if (obj.hasOwnProperty(p = 'ALPHABET')) { + v = obj[p]; + + // Disallow if less than two characters, + // or if it contains '+', '-', '.', whitespace, or a repeated character. + if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { + alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; + ALPHABET = v; + } else { + throw Error + (bignumberError + p + ' invalid: ' + v); + } + } + + } else { + + // '[BigNumber Error] Object expected: {v}' + throw Error + (bignumberError + 'Object expected: ' + obj); + } + } + + return { + DECIMAL_PLACES: DECIMAL_PLACES, + ROUNDING_MODE: ROUNDING_MODE, + EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], + RANGE: [MIN_EXP, MAX_EXP], + CRYPTO: CRYPTO, + MODULO_MODE: MODULO_MODE, + POW_PRECISION: POW_PRECISION, + FORMAT: FORMAT, + ALPHABET: ALPHABET + }; + }; + + + /* + * Return true if v is a BigNumber instance, otherwise return false. + * + * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. + * + * v {any} + * + * '[BigNumber Error] Invalid BigNumber: {v}' + */ + BigNumber.isBigNumber = function (v) { + if (!v || v._isBigNumber !== true) return false; + if (!BigNumber.DEBUG) return true; + + var i, n, + c = v.c, + e = v.e, + s = v.s; + + out: if ({}.toString.call(c) == '[object Array]') { + + if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { + + // If the first element is zero, the BigNumber value must be zero. + if (c[0] === 0) { + if (e === 0 && c.length === 1) return true; + break out; + } + + // Calculate number of digits that c[0] should have, based on the exponent. + i = (e + 1) % LOG_BASE; + if (i < 1) i += LOG_BASE; + + // Calculate number of digits of c[0]. + //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { + if (String(c[0]).length == i) { + + for (i = 0; i < c.length; i++) { + n = c[i]; + if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; + } + + // Last element cannot be zero, unless it is the only element. 
+ if (n !== 0) return true; + } + } + + // Infinity/NaN + } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { + return true; + } + + throw Error + (bignumberError + 'Invalid BigNumber: ' + v); + }; + + + /* + * Return a new BigNumber whose value is the maximum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.maximum = BigNumber.max = function () { + return maxOrMin(arguments, -1); + }; + + + /* + * Return a new BigNumber whose value is the minimum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.minimum = BigNumber.min = function () { + return maxOrMin(arguments, 1); + }; + + + /* + * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, + * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing + * zeros are produced). + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' + * '[BigNumber Error] crypto unavailable' + */ + BigNumber.random = (function () { + var pow2_53 = 0x20000000000000; + + // Return a 53 bit integer n, where 0 <= n < 9007199254740992. + // Check if Math.random() produces more than 32 bits of randomness. + // If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. + // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. + var random53bitInt = (Math.random() * pow2_53) & 0x1fffff + ? function () { return mathfloor(Math.random() * pow2_53); } + : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + + (Math.random() * 0x800000 | 0); }; + + return function (dp) { + var a, b, e, k, v, + i = 0, + c = [], + rand = new BigNumber(ONE); + + if (dp == null) dp = DECIMAL_PLACES; + else intCheck(dp, 0, MAX); + + k = mathceil(dp / LOG_BASE); + + if (CRYPTO) { + + // Browsers supporting crypto.getRandomValues. + if (crypto.getRandomValues) { + + a = crypto.getRandomValues(new Uint32Array(k *= 2)); + + for (; i < k;) { + + // 53 bits: + // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) + // 11111 11111111 11111111 11111111 11100000 00000000 00000000 + // ((Math.pow(2, 32) - 1) >>> 11).toString(2) + // 11111 11111111 11111111 + // 0x20000 is 2^21. + v = a[i] * 0x20000 + (a[i + 1] >>> 11); + + // Rejection sampling: + // 0 <= v < 9007199254740992 + // Probability that v >= 9e15, is + // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 + if (v >= 9e15) { + b = crypto.getRandomValues(new Uint32Array(2)); + a[i] = b[0]; + a[i + 1] = b[1]; + } else { + + // 0 <= v <= 8999999999999999 + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 2; + } + } + i = k / 2; + + // Node.js supporting crypto.randomBytes. + } else if (crypto.randomBytes) { + + // buffer + a = crypto.randomBytes(k *= 7); + + for (; i < k;) { + + // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 + // 0x100000000 is 2^32, 0x1000000 is 2^24 + // 11111 11111111 11111111 11111111 11111111 11111111 11111111 + // 0 <= v < 9007199254740992 + v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; + + if (v >= 9e15) { + crypto.randomBytes(7).copy(a, i); + } else { + + // 0 <= (v % 1e14) <= 99999999999999 + c.push(v % 1e14); + i += 7; + } + } + i = k / 7; + } else { + CRYPTO = false; + throw Error + (bignumberError + 'crypto unavailable'); + } + } + + // Use Math.random. 
+ if (!CRYPTO) { + + for (; i < k;) { + v = random53bitInt(); + if (v < 9e15) c[i++] = v % 1e14; + } + } + + k = c[--i]; + dp %= LOG_BASE; + + // Convert trailing digits to zeros according to dp. + if (k && dp) { + v = POWS_TEN[LOG_BASE - dp]; + c[i] = mathfloor(k / v) * v; + } + + // Remove trailing elements which are zero. + for (; c[i] === 0; c.pop(), i--); + + // Zero? + if (i < 0) { + c = [e = 0]; + } else { + + // Remove leading elements which are zero and adjust exponent accordingly. + for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); + + // Count the digits of the first element of c to determine leading zeros, and... + for (i = 1, v = c[0]; v >= 10; v /= 10, i++); + + // adjust the exponent accordingly. + if (i < LOG_BASE) e -= LOG_BASE - i; + } + + rand.e = e; + rand.c = c; + return rand; + }; + })(); + + + /* + * Return a BigNumber whose value is the sum of the arguments. + * + * arguments {number|string|BigNumber} + */ + BigNumber.sum = function () { + var i = 1, + args = arguments, + sum = new BigNumber(args[0]); + for (; i < args.length;) sum = sum.plus(args[i++]); + return sum; + }; + + + // PRIVATE FUNCTIONS + + + // Called by BigNumber and BigNumber.prototype.toString. + convertBase = (function () { + var decimal = '0123456789'; + + /* + * Convert string of baseIn to an array of numbers of baseOut. + * Eg. toBaseOut('255', 10, 16) returns [15, 15]. + * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. + */ + function toBaseOut(str, baseIn, baseOut, alphabet) { + var j, + arr = [0], + arrL, + i = 0, + len = str.length; + + for (; i < len;) { + for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); + + arr[0] += alphabet.indexOf(str.charAt(i++)); + + for (j = 0; j < arr.length; j++) { + + if (arr[j] > baseOut - 1) { + if (arr[j + 1] == null) arr[j + 1] = 0; + arr[j + 1] += arr[j] / baseOut | 0; + arr[j] %= baseOut; + } + } + } + + return arr.reverse(); + } + + // Convert a numeric string of baseIn to a numeric string of baseOut. + // If the caller is toString, we are converting from base 10 to baseOut. + // If the caller is BigNumber, we are converting from baseIn to base 10. + return function (str, baseIn, baseOut, sign, callerIsToString) { + var alphabet, d, e, k, r, x, xc, y, + i = str.indexOf('.'), + dp = DECIMAL_PLACES, + rm = ROUNDING_MODE; + + // Non-integer. + if (i >= 0) { + k = POW_PRECISION; + + // Unlimited precision. + POW_PRECISION = 0; + str = str.replace('.', ''); + y = new BigNumber(baseIn); + x = y.pow(str.length - i); + POW_PRECISION = k; + + // Convert str as if an integer, then restore the fraction part by dividing the + // result by its base raised to a power. + + y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), + 10, baseOut, decimal); + y.e = y.c.length; + } + + // Convert the number as integer. + + xc = toBaseOut(str, baseIn, baseOut, callerIsToString + ? (alphabet = ALPHABET, decimal) + : (alphabet = decimal, ALPHABET)); + + // xc now represents str as an integer and converted to baseOut. e is the exponent. + e = k = xc.length; + + // Remove trailing zeros. + for (; xc[--k] == 0; xc.pop()); + + // Zero? + if (!xc[0]) return alphabet.charAt(0); + + // Does str represent an integer? If so, no need for the division. + if (i < 0) { + --e; + } else { + x.c = xc; + x.e = e; + + // The sign is needed for correct rounding. + x.s = sign; + x = div(x, y, dp, rm, baseOut); + xc = x.c; + r = x.r; + e = x.e; + } + + // xc now represents str converted to baseOut. + + // The index of the rounding digit. 
+ d = e + dp + 1; + + // The rounding digit: the digit to the right of the digit that may be rounded up. + i = xc[d]; + + // Look at the rounding digits and mode to determine whether to round up. + + k = baseOut / 2; + r = r || d < 0 || xc[d + 1] != null; + + r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || + rm == (x.s < 0 ? 8 : 7)); + + // If the index of the rounding digit is not greater than zero, or xc represents + // zero, then the result of the base conversion is zero or, if rounding up, a value + // such as 0.00001. + if (d < 1 || !xc[0]) { + + // 1^-dp or 0 + str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); + } else { + + // Truncate xc to the required number of decimal places. + xc.length = d; + + // Round up? + if (r) { + + // Rounding up may mean the previous digit has to be rounded up and so on. + for (--baseOut; ++xc[--d] > baseOut;) { + xc[d] = 0; + + if (!d) { + ++e; + xc = [1].concat(xc); + } + } + } + + // Determine trailing zeros. + for (k = xc.length; !xc[--k];); + + // E.g. [4, 11, 15] becomes 4bf. + for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); + + // Add leading zeros, decimal point and trailing zeros as required. + str = toFixedPoint(str, e, alphabet.charAt(0)); + } + + // The caller will add the sign. + return str; + }; + })(); + + + // Perform division in the specified base. Called by div and convertBase. + div = (function () { + + // Assume non-zero x and k. + function multiply(x, k, base) { + var m, temp, xlo, xhi, + carry = 0, + i = x.length, + klo = k % SQRT_BASE, + khi = k / SQRT_BASE | 0; + + for (x = x.slice(); i--;) { + xlo = x[i] % SQRT_BASE; + xhi = x[i] / SQRT_BASE | 0; + m = khi * xlo + xhi * klo; + temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; + carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; + x[i] = temp % base; + } + + if (carry) x = [carry].concat(x); + + return x; + } + + function compare(a, b, aL, bL) { + var i, cmp; + + if (aL != bL) { + cmp = aL > bL ? 1 : -1; + } else { + + for (i = cmp = 0; i < aL; i++) { + + if (a[i] != b[i]) { + cmp = a[i] > b[i] ? 1 : -1; + break; + } + } + } + + return cmp; + } + + function subtract(a, b, aL, base) { + var i = 0; + + // Subtract b from a. + for (; aL--;) { + a[aL] -= i; + i = a[aL] < b[aL] ? 1 : 0; + a[aL] = i * base + a[aL] - b[aL]; + } + + // Remove leading zeros. + for (; !a[0] && a.length > 1; a.splice(0, 1)); + } + + // x: dividend, y: divisor. + return function (x, y, dp, rm, base) { + var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, + yL, yz, + s = x.s == y.s ? 1 : -1, + xc = x.c, + yc = y.c; + + // Either NaN, Infinity or 0? + if (!xc || !xc[0] || !yc || !yc[0]) { + + return new BigNumber( + + // Return NaN if either NaN, or both Infinity or 0. + !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : + + // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. + xc && xc[0] == 0 || !yc ? s * 0 : s / 0 + ); + } + + q = new BigNumber(s); + qc = q.c = []; + e = x.e - y.e; + s = dp + e + 1; + + if (!base) { + base = BASE; + e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); + s = s / LOG_BASE | 0; + } + + // Result exponent may be one less then the current value of e. + // The coefficients of the BigNumbers from convertBase may have trailing zeros. 
+ for (i = 0; yc[i] == (xc[i] || 0); i++); + + if (yc[i] > (xc[i] || 0)) e--; + + if (s < 0) { + qc.push(1); + more = true; + } else { + xL = xc.length; + yL = yc.length; + i = 0; + s += 2; + + // Normalise xc and yc so highest order digit of yc is >= base / 2. + + n = mathfloor(base / (yc[0] + 1)); + + // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. + // if (n > 1 || n++ == 1 && yc[0] < base / 2) { + if (n > 1) { + yc = multiply(yc, n, base); + xc = multiply(xc, n, base); + yL = yc.length; + xL = xc.length; + } + + xi = yL; + rem = xc.slice(0, yL); + remL = rem.length; + + // Add zeros to make remainder as long as divisor. + for (; remL < yL; rem[remL++] = 0); + yz = yc.slice(); + yz = [0].concat(yz); + yc0 = yc[0]; + if (yc[1] >= base / 2) yc0++; + // Not necessary, but to prevent trial digit n > base, when using base 3. + // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; + + do { + n = 0; + + // Compare divisor and remainder. + cmp = compare(yc, rem, yL, remL); + + // If divisor < remainder. + if (cmp < 0) { + + // Calculate trial digit, n. + + rem0 = rem[0]; + if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); + + // n is how many times the divisor goes into the current remainder. + n = mathfloor(rem0 / yc0); + + // Algorithm: + // product = divisor multiplied by trial digit (n). + // Compare product and remainder. + // If product is greater than remainder: + // Subtract divisor from product, decrement trial digit. + // Subtract product from remainder. + // If product was less than remainder at the last compare: + // Compare new remainder and divisor. + // If remainder is greater than divisor: + // Subtract divisor from remainder, increment trial digit. + + if (n > 1) { + + // n may be > base only when base is 3. + if (n >= base) n = base - 1; + + // product = divisor * trial digit. + prod = multiply(yc, n, base); + prodL = prod.length; + remL = rem.length; + + // Compare product and remainder. + // If product > remainder then trial digit n too high. + // n is 1 too high about 5% of the time, and is not known to have + // ever been more than 1 too high. + while (compare(prod, rem, prodL, remL) == 1) { + n--; + + // Subtract divisor from product. + subtract(prod, yL < prodL ? yz : yc, prodL, base); + prodL = prod.length; + cmp = 1; + } + } else { + + // n is 0 or 1, cmp is -1. + // If n is 0, there is no need to compare yc and rem again below, + // so change cmp to 1 to avoid it. + // If n is 1, leave cmp as -1, so yc and rem are compared again. + if (n == 0) { + + // divisor < remainder, so n must be at least 1. + cmp = n = 1; + } + + // product = divisor + prod = yc.slice(); + prodL = prod.length; + } + + if (prodL < remL) prod = [0].concat(prod); + + // Subtract product from remainder. + subtract(rem, prod, remL, base); + remL = rem.length; + + // If product was < remainder. + if (cmp == -1) { + + // Compare divisor and new remainder. + // If divisor < new remainder, subtract divisor from remainder. + // Trial digit n too low. + // n is 1 too low about 5% of the time, and very rarely 2 too low. + while (compare(yc, rem, yL, remL) < 1) { + n++; + + // Subtract divisor from remainder. + subtract(rem, yL < remL ? yz : yc, remL, base); + remL = rem.length; + } + } + } else if (cmp === 0) { + n++; + rem = [0]; + } // else cmp === 1 and n will be 0 + + // Add the next digit, n, to the result array. + qc[i++] = n; + + // Update the remainder. 
+ if (rem[0]) { + rem[remL++] = xc[xi] || 0; + } else { + rem = [xc[xi]]; + remL = 1; + } + } while ((xi++ < xL || rem[0] != null) && s--); + + more = rem[0] != null; + + // Leading zero? + if (!qc[0]) qc.splice(0, 1); + } + + if (base == BASE) { + + // To calculate q.e, first get the number of digits of qc[0]. + for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); + + round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); + + // Caller is convertBase. + } else { + q.e = e; + q.r = +more; + } + + return q; + }; + })(); + + + /* + * Return a string representing the value of BigNumber n in fixed-point or exponential + * notation rounded to the specified decimal places or significant digits. + * + * n: a BigNumber. + * i: the index of the last digit required (i.e. the digit that may be rounded up). + * rm: the rounding mode. + * id: 1 (toExponential) or 2 (toPrecision). + */ + function format(n, i, rm, id) { + var c0, e, ne, len, str; + + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + if (!n.c) return n.toString(); + + c0 = n.c[0]; + ne = n.e; + + if (i == null) { + str = coeffToString(n.c); + str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) + ? toExponential(str, ne) + : toFixedPoint(str, ne, '0'); + } else { + n = round(new BigNumber(n), i, rm); + + // n.e may have changed if the value was rounded up. + e = n.e; + + str = coeffToString(n.c); + len = str.length; + + // toPrecision returns exponential notation if the number of significant digits + // specified is less than the number of digits necessary to represent the integer + // part of the value in fixed-point notation. + + // Exponential notation. + if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { + + // Append zeros? + for (; len < i; str += '0', len++); + str = toExponential(str, e); + + // Fixed-point notation. + } else { + i -= ne; + str = toFixedPoint(str, e, '0'); + + // Append zeros? + if (e + 1 > len) { + if (--i > 0) for (str += '.'; i--; str += '0'); + } else { + i += e - len; + if (i > 0) { + if (e + 1 == len) str += '.'; + for (; i--; str += '0'); + } + } + } + } + + return n.s < 0 && c0 ? '-' + str : str; + } + + + // Handle BigNumber.max and BigNumber.min. + // If any number is NaN, return NaN. + function maxOrMin(args, n) { + var k, y, + i = 1, + x = new BigNumber(args[0]); + + for (; i < args.length; i++) { + y = new BigNumber(args[i]); + if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { + x = y; + } + } + + return x; + } + + + /* + * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. + * Called by minus, plus and times. + */ + function normalise(n, c, e) { + var i = 1, + j = c.length; + + // Remove trailing zeros. + for (; !c[--j]; c.pop()); + + // Calculate the base 10 exponent. First get the number of digits of c[0]. + for (j = c[0]; j >= 10; j /= 10, i++); + + // Overflow? + if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { + + // Infinity. + n.c = n.e = null; + + // Underflow? + } else if (e < MIN_EXP) { + + // Zero. + n.c = [n.e = 0]; + } else { + n.e = e; + n.c = c; + } + + return n; + } + + + // Handle values that fail the validity test in BigNumber. + parseNumeric = (function () { + var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, + dotAfter = /^([^.]+)\.$/, + dotBefore = /^\.([^.]+)$/, + isInfinityOrNaN = /^-?(Infinity|NaN)$/, + whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; + + return function (x, str, isNum, b) { + var base, + s = isNum ? str : str.replace(whitespaceOrPlus, ''); + + // No exception on ±Infinity or NaN. 
+ if (isInfinityOrNaN.test(s)) { + x.s = isNaN(s) ? null : s < 0 ? -1 : 1; + } else { + if (!isNum) { + + // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i + s = s.replace(basePrefix, function (m, p1, p2) { + base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; + return !b || b == base ? p1 : m; + }); + + if (b) { + base = b; + + // E.g. '1.' to '1', '.1' to '0.1' + s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); + } + + if (str != s) return new BigNumber(s, base); + } + + // '[BigNumber Error] Not a number: {n}' + // '[BigNumber Error] Not a base {b} number: {n}' + if (BigNumber.DEBUG) { + throw Error + (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); + } + + // NaN + x.s = null; + } + + x.c = x.e = null; + } + })(); + + + /* + * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. + * If r is truthy, it is known that there are more digits after the rounding digit. + */ + function round(x, sd, rm, r) { + var d, i, j, k, n, ni, rd, + xc = x.c, + pows10 = POWS_TEN; + + // if x is not Infinity or NaN... + if (xc) { + + // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. + // n is a base 1e14 number, the value of the element of array x.c containing rd. + // ni is the index of n within x.c. + // d is the number of digits of n. + // i is the index of rd within n including leading zeros. + // j is the actual index of rd within n (if < 0, rd is a leading zero). + out: { + + // Get the number of digits of the first element of xc. + for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); + i = sd - d; + + // If the rounding digit is in the first element of xc... + if (i < 0) { + i += LOG_BASE; + j = sd; + n = xc[ni = 0]; + + // Get the rounding digit at index j of n. + rd = mathfloor(n / pows10[d - j - 1] % 10); + } else { + ni = mathceil((i + 1) / LOG_BASE); + + if (ni >= xc.length) { + + if (r) { + + // Needed by sqrt. + for (; xc.length <= ni; xc.push(0)); + n = rd = 0; + d = 1; + i %= LOG_BASE; + j = i - LOG_BASE + 1; + } else { + break out; + } + } else { + n = k = xc[ni]; + + // Get the number of digits of n. + for (d = 1; k >= 10; k /= 10, d++); + + // Get the index of rd within n. + i %= LOG_BASE; + + // Get the index of rd within n, adjusted for leading zeros. + // The number of leading zeros of n is given by LOG_BASE - d. + j = i - LOG_BASE + d; + + // Get the rounding digit at index j of n. + rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); + } + } + + r = r || sd < 0 || + + // Are there any non-zero digits after the rounding digit? + // The expression n % pows10[d - j - 1] returns all digits of n to the right + // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. + xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); + + r = rm < 4 + ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) + : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && + + // Check whether the digit to the left of the rounding digit is odd. + ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || + rm == (x.s < 0 ? 8 : 7)); + + if (sd < 1 || !xc[0]) { + xc.length = 0; + + if (r) { + + // Convert sd to decimal places. + sd -= x.e + 1; + + // 1, 0.1, 0.01, 0.001, 0.0001 etc. + xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; + x.e = -sd || 0; + } else { + + // Zero. + xc[0] = x.e = 0; + } + + return x; + } + + // Remove excess digits. + if (i == 0) { + xc.length = ni; + k = 1; + ni--; + } else { + xc.length = ni + 1; + k = pows10[LOG_BASE - i]; + + // E.g. 
56700 becomes 56000 if 7 is the rounding digit. + // j > 0 means i > number of leading zeros of n. + xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; + } + + // Round up? + if (r) { + + for (; ;) { + + // If the digit to be rounded up is in the first element of xc... + if (ni == 0) { + + // i will be the length of xc[0] before k is added. + for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); + j = xc[0] += k; + for (k = 1; j >= 10; j /= 10, k++); + + // if i != k the length has increased. + if (i != k) { + x.e++; + if (xc[0] == BASE) xc[0] = 1; + } + + break; + } else { + xc[ni] += k; + if (xc[ni] != BASE) break; + xc[ni--] = 0; + k = 1; + } + } + } + + // Remove trailing zeros. + for (i = xc.length; xc[--i] === 0; xc.pop()); + } + + // Overflow? Infinity. + if (x.e > MAX_EXP) { + x.c = x.e = null; + + // Underflow? Zero. + } else if (x.e < MIN_EXP) { + x.c = [x.e = 0]; + } + } + + return x; + } + + + function valueOf(n) { + var str, + e = n.e; + + if (e === null) return n.toString(); + + str = coeffToString(n.c); + + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(str, e) + : toFixedPoint(str, e, '0'); + + return n.s < 0 ? '-' + str : str; + } + + + // PROTOTYPE/INSTANCE METHODS + + + /* + * Return a new BigNumber whose value is the absolute value of this BigNumber. + */ + P.absoluteValue = P.abs = function () { + var x = new BigNumber(this); + if (x.s < 0) x.s = 1; + return x; + }; + + + /* + * Return + * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), + * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), + * 0 if they have the same value, + * or null if the value of either is NaN. + */ + P.comparedTo = function (y, b) { + return compare(this, new BigNumber(y, b)); + }; + + + /* + * If dp is undefined or null or true or false, return the number of decimal places of the + * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * + * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * [dp] {number} Decimal places: integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.decimalPlaces = P.dp = function (dp, rm) { + var c, n, v, + x = this; + + if (dp != null) { + intCheck(dp, 0, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), dp + x.e + 1, rm); + } + + if (!(c = x.c)) return null; + n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; + + // Subtract the number of trailing zeros of the last number. + if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); + if (n < 0) n = 0; + + return n; + }; + + + /* + * n / 0 = I + * n / N = N + * n / I = 0 + * 0 / n = 0 + * 0 / 0 = N + * 0 / N = N + * 0 / I = 0 + * N / n = N + * N / 0 = N + * N / N = N + * N / I = N + * I / n = I + * I / 0 = I + * I / N = N + * I / I = N + * + * Return a new BigNumber whose value is the value of this BigNumber divided by the value of + * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. 
+ */ + P.dividedBy = P.div = function (y, b) { + return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); + }; + + + /* + * Return a new BigNumber whose value is the integer part of dividing the value of this + * BigNumber by the value of BigNumber(y, b). + */ + P.dividedToIntegerBy = P.idiv = function (y, b) { + return div(this, new BigNumber(y, b), 0, 1); + }; + + + /* + * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. + * + * If m is present, return the result modulo m. + * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. + * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. + * + * The modular power operation works efficiently when x, n, and m are integers, otherwise it + * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. + * + * n {number|string|BigNumber} The exponent. An integer. + * [m] {number|string|BigNumber} The modulus. + * + * '[BigNumber Error] Exponent not an integer: {n}' + */ + P.exponentiatedBy = P.pow = function (n, m) { + var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, + x = this; + + n = new BigNumber(n); + + // Allow NaN and ±Infinity, but not other non-integers. + if (n.c && !n.isInteger()) { + throw Error + (bignumberError + 'Exponent not an integer: ' + valueOf(n)); + } + + if (m != null) m = new BigNumber(m); + + // Exponent of MAX_SAFE_INTEGER is 15. + nIsBig = n.e > 14; + + // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. + if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { + + // The sign of the result of pow when x is negative depends on the evenness of n. + // If +n overflows to ±Infinity, the evenness of n would be not be known. + y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); + return m ? y.mod(m) : y; + } + + nIsNeg = n.s < 0; + + if (m) { + + // x % m returns NaN if abs(m) is zero, or m is NaN. + if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); + + isModExp = !nIsNeg && x.isInteger() && m.isInteger(); + + if (isModExp) x = x.mod(m); + + // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. + // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. + } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 + // [1, 240000000] + ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 + // [80000000000000] [99999750000000] + : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { + + // If x is negative and n is odd, k = -0, else k = 0. + k = x.s < 0 && isOdd(n) ? -0 : 0; + + // If x >= 1, k = ±Infinity. + if (x.e > -1) k = 1 / k; + + // If n is negative return ±0, else return ±Infinity. + return new BigNumber(nIsNeg ? 1 / k : k); + + } else if (POW_PRECISION) { + + // Truncating each coefficient array to a length of k after each multiplication + // equates to truncating significant digits to POW_PRECISION + [28, 41], + // i.e. there will be a minimum of 28 guard digits retained. + k = mathceil(POW_PRECISION / LOG_BASE + 2); + } + + if (nIsBig) { + half = new BigNumber(0.5); + if (nIsNeg) n.s = 1; + nIsOdd = isOdd(n); + } else { + i = Math.abs(+valueOf(n)); + nIsOdd = i % 2; + } + + y = new BigNumber(ONE); + + // Performs 54 loop iterations for n of 9007199254740991. 
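+    // The loop below is exponentiation by squaring (square-and-multiply): multiply the
+    // accumulator y by x whenever the current low bit of the exponent is set, then square
+    // x and halve the exponent; when isModExp is true, each product is reduced modulo m.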
+ for (; ;) { + + if (nIsOdd) { + y = y.times(x); + if (!y.c) break; + + if (k) { + if (y.c.length > k) y.c.length = k; + } else if (isModExp) { + y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); + } + } + + if (i) { + i = mathfloor(i / 2); + if (i === 0) break; + nIsOdd = i % 2; + } else { + n = n.times(half); + round(n, n.e + 1, 1); + + if (n.e > 14) { + nIsOdd = isOdd(n); + } else { + i = +valueOf(n); + if (i === 0) break; + nIsOdd = i % 2; + } + } + + x = x.times(x); + + if (k) { + if (x.c && x.c.length > k) x.c.length = k; + } else if (isModExp) { + x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); + } + } + + if (isModExp) return y; + if (nIsNeg) y = ONE.div(y); + + return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer + * using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' + */ + P.integerValue = function (rm) { + var n = new BigNumber(this); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + return round(n, n.e + 1, rm); + }; + + + /* + * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), + * otherwise return false. + */ + P.isEqualTo = P.eq = function (y, b) { + return compare(this, new BigNumber(y, b)) === 0; + }; + + + /* + * Return true if the value of this BigNumber is a finite number, otherwise return false. + */ + P.isFinite = function () { + return !!this.c; + }; + + + /* + * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isGreaterThan = P.gt = function (y, b) { + return compare(this, new BigNumber(y, b)) > 0; + }; + + + /* + * Return true if the value of this BigNumber is greater than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isGreaterThanOrEqualTo = P.gte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; + + }; + + + /* + * Return true if the value of this BigNumber is an integer, otherwise return false. + */ + P.isInteger = function () { + return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; + }; + + + /* + * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), + * otherwise return false. + */ + P.isLessThan = P.lt = function (y, b) { + return compare(this, new BigNumber(y, b)) < 0; + }; + + + /* + * Return true if the value of this BigNumber is less than or equal to the value of + * BigNumber(y, b), otherwise return false. + */ + P.isLessThanOrEqualTo = P.lte = function (y, b) { + return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; + }; + + + /* + * Return true if the value of this BigNumber is NaN, otherwise return false. + */ + P.isNaN = function () { + return !this.s; + }; + + + /* + * Return true if the value of this BigNumber is negative, otherwise return false. + */ + P.isNegative = function () { + return this.s < 0; + }; + + + /* + * Return true if the value of this BigNumber is positive, otherwise return false. + */ + P.isPositive = function () { + return this.s > 0; + }; + + + /* + * Return true if the value of this BigNumber is 0 or -0, otherwise return false. 
+ */ + P.isZero = function () { + return !!this.c && this.c[0] == 0; + }; + + + /* + * n - 0 = n + * n - N = N + * n - I = -I + * 0 - n = -n + * 0 - 0 = 0 + * 0 - N = N + * 0 - I = -I + * N - n = N + * N - 0 = N + * N - N = N + * N - I = N + * I - n = I + * I - 0 = I + * I - N = N + * I - I = N + * + * Return a new BigNumber whose value is the value of this BigNumber minus the value of + * BigNumber(y, b). + */ + P.minus = function (y, b) { + var i, j, t, xLTy, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.plus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Either Infinity? + if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); + + // Either zero? + if (!xc[0] || !yc[0]) { + + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : + + // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity + ROUNDING_MODE == 3 ? -0 : 0); + } + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Determine which is the bigger number. + if (a = xe - ye) { + + if (xLTy = a < 0) { + a = -a; + t = xc; + } else { + ye = xe; + t = yc; + } + + t.reverse(); + + // Prepend zeros to equalise exponents. + for (b = a; b--; t.push(0)); + t.reverse(); + } else { + + // Exponents equal. Check digit by digit. + j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; + + for (a = b = 0; b < j; b++) { + + if (xc[b] != yc[b]) { + xLTy = xc[b] < yc[b]; + break; + } + } + } + + // x < y? Point xc to the array of the bigger number. + if (xLTy) { + t = xc; + xc = yc; + yc = t; + y.s = -y.s; + } + + b = (j = yc.length) - (i = xc.length); + + // Append zeros to xc if shorter. + // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. + if (b > 0) for (; b--; xc[i++] = 0); + b = BASE - 1; + + // Subtract yc from xc. + for (; j > a;) { + + if (xc[--j] < yc[j]) { + for (i = j; i && !xc[--i]; xc[i] = b); + --xc[i]; + xc[j] += BASE; + } + + xc[j] -= yc[j]; + } + + // Remove leading zeros and adjust exponent accordingly. + for (; xc[0] == 0; xc.splice(0, 1), --ye); + + // Zero? + if (!xc[0]) { + + // Following IEEE 754 (2008) 6.3, + // n - n = +0 but n - n = -0 when rounding towards -Infinity. + y.s = ROUNDING_MODE == 3 ? -1 : 1; + y.c = [y.e = 0]; + return y; + } + + // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity + // for finite x and y. + return normalise(y, xc, ye); + }; + + + /* + * n % 0 = N + * n % N = N + * n % I = n + * 0 % n = 0 + * -0 % n = -0 + * 0 % 0 = N + * 0 % N = N + * 0 % I = 0 + * N % n = N + * N % 0 = N + * N % N = N + * N % I = N + * I % n = N + * I % 0 = N + * I % N = N + * I % I = N + * + * Return a new BigNumber whose value is the value of this BigNumber modulo the value of + * BigNumber(y, b). The result depends on the value of MODULO_MODE. + */ + P.modulo = P.mod = function (y, b) { + var q, s, + x = this; + + y = new BigNumber(y, b); + + // Return NaN if x is Infinity or NaN, or y is NaN or zero. + if (!x.c || !y.s || y.c && !y.c[0]) { + return new BigNumber(NaN); + + // Return x if y is Infinity or x is zero. 
+ } else if (!y.c || x.c && !x.c[0]) { + return new BigNumber(x); + } + + if (MODULO_MODE == 9) { + + // Euclidian division: q = sign(y) * floor(x / abs(y)) + // r = x - qy where 0 <= r < abs(y) + s = y.s; + y.s = 1; + q = div(x, y, 0, 3); + y.s = s; + q.s *= s; + } else { + q = div(x, y, 0, MODULO_MODE); + } + + y = x.minus(q.times(y)); + + // To match JavaScript %, ensure sign of zero is sign of dividend. + if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; + + return y; + }; + + + /* + * n * 0 = 0 + * n * N = N + * n * I = I + * 0 * n = 0 + * 0 * 0 = 0 + * 0 * N = N + * 0 * I = N + * N * n = N + * N * 0 = N + * N * N = N + * N * I = N + * I * n = I + * I * 0 = N + * I * N = N + * I * I = I + * + * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value + * of BigNumber(y, b). + */ + P.multipliedBy = P.times = function (y, b) { + var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, + base, sqrtBase, + x = this, + xc = x.c, + yc = (y = new BigNumber(y, b)).c; + + // Either NaN, ±Infinity or ±0? + if (!xc || !yc || !xc[0] || !yc[0]) { + + // Return NaN if either is NaN, or one is 0 and the other is Infinity. + if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { + y.c = y.e = y.s = null; + } else { + y.s *= x.s; + + // Return ±Infinity if either is ±Infinity. + if (!xc || !yc) { + y.c = y.e = null; + + // Return ±0 if either is ±0. + } else { + y.c = [0]; + y.e = 0; + } + } + + return y; + } + + e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); + y.s *= x.s; + xcL = xc.length; + ycL = yc.length; + + // Ensure xc points to longer array and xcL to its length. + if (xcL < ycL) { + zc = xc; + xc = yc; + yc = zc; + i = xcL; + xcL = ycL; + ycL = i; + } + + // Initialise the result array with zeros. + for (i = xcL + ycL, zc = []; i--; zc.push(0)); + + base = BASE; + sqrtBase = SQRT_BASE; + + for (i = ycL; --i >= 0;) { + c = 0; + ylo = yc[i] % sqrtBase; + yhi = yc[i] / sqrtBase | 0; + + for (k = xcL, j = i + k; j > i;) { + xlo = xc[--k] % sqrtBase; + xhi = xc[k] / sqrtBase | 0; + m = yhi * xlo + xhi * ylo; + xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; + c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; + zc[j--] = xlo % base; + } + + zc[j] = c; + } + + if (c) { + ++e; + } else { + zc.splice(0, 1); + } + + return normalise(y, zc, e); + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber negated, + * i.e. multiplied by -1. + */ + P.negated = function () { + var x = new BigNumber(this); + x.s = -x.s || null; + return x; + }; + + + /* + * n + 0 = n + * n + N = N + * n + I = I + * 0 + n = n + * 0 + 0 = 0 + * 0 + N = N + * 0 + I = I + * N + n = N + * N + 0 = N + * N + N = N + * N + I = N + * I + n = I + * I + 0 = I + * I + N = N + * I + I = I + * + * Return a new BigNumber whose value is the value of this BigNumber plus the value of + * BigNumber(y, b). + */ + P.plus = function (y, b) { + var t, + x = this, + a = x.s; + + y = new BigNumber(y, b); + b = y.s; + + // Either NaN? + if (!a || !b) return new BigNumber(NaN); + + // Signs differ? + if (a != b) { + y.s = -b; + return x.minus(y); + } + + var xe = x.e / LOG_BASE, + ye = y.e / LOG_BASE, + xc = x.c, + yc = y.c; + + if (!xe || !ye) { + + // Return ±Infinity if either ±Infinity. + if (!xc || !yc) return new BigNumber(a / 0); + + // Either zero? + // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. + if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? 
x : a * 0); + } + + xe = bitFloor(xe); + ye = bitFloor(ye); + xc = xc.slice(); + + // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. + if (a = xe - ye) { + if (a > 0) { + ye = xe; + t = yc; + } else { + a = -a; + t = xc; + } + + t.reverse(); + for (; a--; t.push(0)); + t.reverse(); + } + + a = xc.length; + b = yc.length; + + // Point xc to the longer array, and b to the shorter length. + if (a - b < 0) { + t = yc; + yc = xc; + xc = t; + b = a; + } + + // Only start adding at yc.length - 1 as the further digits of xc can be ignored. + for (a = 0; b;) { + a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; + xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; + } + + if (a) { + xc = [a].concat(xc); + ++ye; + } + + // No need to check for zero, as +x + +y != 0 && -x + -y != 0 + // ye = MAX_EXP + 1 possible + return normalise(y, xc, ye); + }; + + + /* + * If sd is undefined or null or true or false, return the number of significant digits of + * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. + * If sd is true include integer-part trailing zeros in the count. + * + * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this + * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or + * ROUNDING_MODE if rm is omitted. + * + * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. + * boolean: whether to count integer-part trailing zeros: true or false. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.precision = P.sd = function (sd, rm) { + var c, n, v, + x = this; + + if (sd != null && sd !== !!sd) { + intCheck(sd, 1, MAX); + if (rm == null) rm = ROUNDING_MODE; + else intCheck(rm, 0, 8); + + return round(new BigNumber(x), sd, rm); + } + + if (!(c = x.c)) return null; + v = c.length - 1; + n = v * LOG_BASE + 1; + + if (v = c[v]) { + + // Subtract the number of trailing zeros of the last element. + for (; v % 10 == 0; v /= 10, n--); + + // Add the number of digits of the first element. + for (v = c[0]; v >= 10; v /= 10, n++); + } + + if (sd && x.e + 1 > n) n = x.e + 1; + + return n; + }; + + + /* + * Return a new BigNumber whose value is the value of this BigNumber shifted by k places + * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. + * + * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' + */ + P.shiftedBy = function (k) { + intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); + return this.times('1e' + k); + }; + + + /* + * sqrt(-n) = N + * sqrt(N) = N + * sqrt(-I) = N + * sqrt(I) = I + * sqrt(0) = 0 + * sqrt(-0) = -0 + * + * Return a new BigNumber whose value is the square root of the value of this BigNumber, + * rounded according to DECIMAL_PLACES and ROUNDING_MODE. + */ + P.squareRoot = P.sqrt = function () { + var m, n, r, rep, t, + x = this, + c = x.c, + s = x.s, + e = x.e, + dp = DECIMAL_PLACES + 4, + half = new BigNumber('0.5'); + + // Negative/NaN/Infinity/zero? + if (s !== 1 || !c || !c[0]) { + return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); + } + + // Initial estimate. + s = Math.sqrt(+valueOf(x)); + + // Math.sqrt underflow/overflow? + // Pass x to Math.sqrt as integer, then adjust the exponent of the result. 
+ if (s == 0 || s == 1 / 0) { + n = coeffToString(c); + if ((n.length + e) % 2 == 0) n += '0'; + s = Math.sqrt(+n); + e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); + + if (s == 1 / 0) { + n = '5e' + e; + } else { + n = s.toExponential(); + n = n.slice(0, n.indexOf('e') + 1) + e; + } + + r = new BigNumber(n); + } else { + r = new BigNumber(s + ''); + } + + // Check for zero. + // r could be zero if MIN_EXP is changed after the this value was created. + // This would cause a division by zero (x/t) and hence Infinity below, which would cause + // coeffToString to throw. + if (r.c[0]) { + e = r.e; + s = e + dp; + if (s < 3) s = 0; + + // Newton-Raphson iteration. + for (; ;) { + t = r; + r = half.times(t.plus(div(x, t, dp, 1))); + + if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { + + // The exponent of r may here be one less than the final result exponent, + // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits + // are indexed correctly. + if (r.e < e) --s; + n = n.slice(s - 3, s + 1); + + // The 4th rounding digit may be in error by -1 so if the 4 rounding digits + // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the + // iteration. + if (n == '9999' || !rep && n == '4999') { + + // On the first iteration only, check to see if rounding up gives the + // exact result as the nines may infinitely repeat. + if (!rep) { + round(t, t.e + DECIMAL_PLACES + 2, 0); + + if (t.times(t).eq(x)) { + r = t; + break; + } + } + + dp += 4; + s += 4; + rep = 1; + } else { + + // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact + // result. If not, then there are further digits and m will be truthy. + if (!+n || !+n.slice(1) && n.charAt(0) == '5') { + + // Truncate to the first rounding digit. + round(r, r.e + DECIMAL_PLACES + 2, 1); + m = !r.times(r).eq(x); + } + + break; + } + } + } + } + + return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); + }; + + + /* + * Return a string representing the value of this BigNumber in exponential notation and + * rounded using ROUNDING_MODE to dp fixed decimal places. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toExponential = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp++; + } + return format(this, dp, rm, 1); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounding + * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. + * + * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', + * but e.g. (-0.00001).toFixed(0) is '-0'. + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + */ + P.toFixed = function (dp, rm) { + if (dp != null) { + intCheck(dp, 0, MAX); + dp = dp + this.e + 1; + } + return format(this, dp, rm); + }; + + + /* + * Return a string representing the value of this BigNumber in fixed-point notation rounded + * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties + * of the format or FORMAT object (see BigNumber.set). + * + * The formatting object may contain some or all of the properties shown below. 
+ * + * FORMAT = { + * prefix: '', + * groupSize: 3, + * secondaryGroupSize: 0, + * groupSeparator: ',', + * decimalSeparator: '.', + * fractionGroupSize: 0, + * fractionGroupSeparator: '\xA0', // non-breaking space + * suffix: '' + * }; + * + * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * [format] {object} Formatting options. See FORMAT pbject above. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' + * '[BigNumber Error] Argument not an object: {format}' + */ + P.toFormat = function (dp, rm, format) { + var str, + x = this; + + if (format == null) { + if (dp != null && rm && typeof rm == 'object') { + format = rm; + rm = null; + } else if (dp && typeof dp == 'object') { + format = dp; + dp = rm = null; + } else { + format = FORMAT; + } + } else if (typeof format != 'object') { + throw Error + (bignumberError + 'Argument not an object: ' + format); + } + + str = x.toFixed(dp, rm); + + if (x.c) { + var i, + arr = str.split('.'), + g1 = +format.groupSize, + g2 = +format.secondaryGroupSize, + groupSeparator = format.groupSeparator || '', + intPart = arr[0], + fractionPart = arr[1], + isNeg = x.s < 0, + intDigits = isNeg ? intPart.slice(1) : intPart, + len = intDigits.length; + + if (g2) { + i = g1; + g1 = g2; + g2 = i; + len -= i; + } + + if (g1 > 0 && len > 0) { + i = len % g1 || g1; + intPart = intDigits.substr(0, i); + for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); + if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); + if (isNeg) intPart = '-' + intPart; + } + + str = fractionPart + ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) + ? fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), + '$&' + (format.fractionGroupSeparator || '')) + : fractionPart) + : intPart; + } + + return (format.prefix || '') + str + (format.suffix || ''); + }; + + + /* + * Return an array of two BigNumbers representing the value of this BigNumber as a simple + * fraction with an integer numerator and an integer denominator. + * The denominator will be a positive non-zero value less than or equal to the specified + * maximum denominator. If a maximum denominator is not specified, the denominator will be + * the lowest value necessary to represent the number exactly. + * + * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. + * + * '[BigNumber Error] Argument {not an integer|out of range} : {md}' + */ + P.toFraction = function (md) { + var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, + x = this, + xc = x.c; + + if (md != null) { + n = new BigNumber(md); + + // Throw if md is less than one or is not an integer, unless it is Infinity. + if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { + throw Error + (bignumberError + 'Argument ' + + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); + } + } + + if (!xc) return new BigNumber(x); + + d = new BigNumber(ONE); + n1 = d0 = new BigNumber(ONE); + d1 = n0 = new BigNumber(ONE); + s = coeffToString(xc); + + // Determine initial denominator. + // d is a power of 10 and the minimum max denominator that specifies the value exactly. + e = d.e = s.length - x.e - 1; + d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; + md = !md || n.comparedTo(d) > 0 ? (e > 0 ? 
d : n1) : n; + + exp = MAX_EXP; + MAX_EXP = 1 / 0; + n = new BigNumber(s); + + // n0 = d1 = 0 + n0.c[0] = 0; + + for (; ;) { + q = div(n, d, 0, 1); + d2 = d0.plus(q.times(d1)); + if (d2.comparedTo(md) == 1) break; + d0 = d1; + d1 = d2; + n1 = n0.plus(q.times(d2 = n1)); + n0 = d2; + d = n.minus(q.times(d2 = d)); + n = d2; + } + + d2 = div(md.minus(d0), d1, 0, 1); + n0 = n0.plus(d2.times(n1)); + d0 = d0.plus(d2.times(d1)); + n0.s = n1.s = x.s; + e = e * 2; + + // Determine which fraction is closer to x, n0/d0 or n1/d1 + r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( + div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; + + MAX_EXP = exp; + + return r; + }; + + + /* + * Return the value of this BigNumber converted to a number primitive. + */ + P.toNumber = function () { + return +valueOf(this); + }; + + + /* + * Return a string representing the value of this BigNumber rounded to sd significant digits + * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits + * necessary to represent the integer part of the value in fixed-point notation, then use + * exponential notation. + * + * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. + * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. + * + * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' + */ + P.toPrecision = function (sd, rm) { + if (sd != null) intCheck(sd, 1, MAX); + return format(this, sd, rm, 2); + }; + + + /* + * Return a string representing the value of this BigNumber in base b, or base 10 if b is + * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and + * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent + * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than + * TO_EXP_NEG, return exponential notation. + * + * [b] {number} Integer, 2 to ALPHABET.length inclusive. + * + * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' + */ + P.toString = function (b) { + var str, + n = this, + s = n.s, + e = n.e; + + // Infinity or NaN? + if (e === null) { + if (s) { + str = 'Infinity'; + if (s < 0) str = '-' + str; + } else { + str = 'NaN'; + } + } else { + if (b == null) { + str = e <= TO_EXP_NEG || e >= TO_EXP_POS + ? toExponential(coeffToString(n.c), e) + : toFixedPoint(coeffToString(n.c), e, '0'); + } else if (b === 10 && alphabetHasNormalDecimalDigits) { + n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); + str = toFixedPoint(coeffToString(n.c), n.e, '0'); + } else { + intCheck(b, 2, ALPHABET.length, 'Base'); + str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); + } + + if (s < 0 && n.c[0]) str = '-' + str; + } + + return str; + }; + + + /* + * Return as toString, but do not accept a base argument, and include the minus sign for + * negative zero. + */ + P.valueOf = P.toJSON = function () { + return valueOf(this); + }; + + + P._isBigNumber = true; + + if (configObject != null) BigNumber.set(configObject); + + return BigNumber; + } + + + // PRIVATE HELPER FUNCTIONS + + // These functions don't need access to variables, + // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. + + + function bitFloor(n) { + var i = n | 0; + return n > 0 || n === i ? i : i - 1; + } + + + // Return a coefficient array as a string of base 10 digits. 
+ function coeffToString(a) { + var s, z, + i = 1, + j = a.length, + r = a[0] + ''; + + for (; i < j;) { + s = a[i++] + ''; + z = LOG_BASE - s.length; + for (; z--; s = '0' + s); + r += s; + } + + // Determine trailing zeros. + for (j = r.length; r.charCodeAt(--j) === 48;); + + return r.slice(0, j + 1 || 1); + } + + + // Compare the value of BigNumbers x and y. + function compare(x, y) { + var a, b, + xc = x.c, + yc = y.c, + i = x.s, + j = y.s, + k = x.e, + l = y.e; + + // Either NaN? + if (!i || !j) return null; + + a = xc && !xc[0]; + b = yc && !yc[0]; + + // Either zero? + if (a || b) return a ? b ? 0 : -j : i; + + // Signs differ? + if (i != j) return i; + + a = i < 0; + b = k == l; + + // Either Infinity? + if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; + + // Compare exponents. + if (!b) return k > l ^ a ? 1 : -1; + + j = (k = xc.length) < (l = yc.length) ? k : l; + + // Compare digit by digit. + for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; + + // Compare lengths. + return k == l ? 0 : k > l ^ a ? 1 : -1; + } + + + /* + * Check that n is a primitive number, an integer, and in range, otherwise throw. + */ + function intCheck(n, min, max, name) { + if (n < min || n > max || n !== mathfloor(n)) { + throw Error + (bignumberError + (name || 'Argument') + (typeof n == 'number' + ? n < min || n > max ? ' out of range: ' : ' not an integer: ' + : ' not a primitive number: ') + String(n)); + } + } + + + // Assumes finite n. + function isOdd(n) { + var k = n.c.length - 1; + return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; + } + + + function toExponential(str, e) { + return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + + (e < 0 ? 'e' : 'e+') + e; + } + + + function toFixedPoint(str, e, z) { + var len, zs; + + // Negative exponent? + if (e < 0) { + + // Prepend zeros. + for (zs = z + '.'; ++e; zs += z); + str = zs + str; + + // Positive exponent + } else { + len = str.length; + + // Append zeros. + if (++e > len) { + for (zs = z, e -= len; --e; zs += z); + str += zs; + } else if (e < len) { + str = str.slice(0, e) + '.' + str.slice(e); + } + } + + return str; + } + + + // EXPORT + + + BigNumber = clone(); + BigNumber['default'] = BigNumber.BigNumber = BigNumber; + + // AMD. + if (typeof define == 'function' && define.amd) { + define(function () { return BigNumber; }); + + // Node.js and other environments that support module.exports. + } else if ( true && module.exports) { + module.exports = BigNumber; + + // Browser. + } else { + if (!globalObject) { + globalObject = typeof self != 'undefined' && self ? self : window; + } + + globalObject.BigNumber = BigNumber; + } +})(this); + + /***/ }), /***/ 4691: @@ -51001,6 +54580,55 @@ function expand(str, isTop) { +/***/ }), + +/***/ 9732: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/*jshint node:true */ + +var Buffer = (__nccwpck_require__(181).Buffer); // browserify +var SlowBuffer = (__nccwpck_require__(181).SlowBuffer); + +module.exports = bufferEq; + +function bufferEq(a, b) { + + // shortcutting on type is necessary for correctness + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + return false; + } + + // buffer sizes should be well-known information, so despite this + // shortcutting, it doesn't leak any information about the *contents* of the + // buffers. 
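+  // The XOR loop below accumulates every byte difference and only inspects the result
+  // once the loop has finished, so the comparison time depends on the buffer length,
+  // not on where (or whether) the contents differ.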
+ if (a.length !== b.length) { + return false; + } + + var c = 0; + for (var i = 0; i < a.length; i++) { + /*jshint bitwise:false */ + c |= a[i] ^ b[i]; // XOR + } + return c === 0; +} + +bufferEq.install = function() { + Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { + return bufferEq(this, that); + }; +}; + +var origBufEqual = Buffer.prototype.equal; +var origSlowBufEqual = SlowBuffer.prototype.equal; +bufferEq.restore = function() { + Buffer.prototype.equal = origBufEqual; + SlowBuffer.prototype.equal = origSlowBufEqual; +}; + + /***/ }), /***/ 5630: @@ -51236,6 +54864,850 @@ var isArray = Array.isArray || function (xs) { }; +/***/ }), + +/***/ 6110: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? 
http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
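+ * For example, `debug('payload: %j', obj)` renders `obj` via JSON.stringify in this browser build.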
+ */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 897: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __nccwpck_require__(744); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + +/***/ }), + +/***/ 2830: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. 
+ */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = __nccwpck_require__(6110); +} else { + module.exports = __nccwpck_require__(5108); +} + + +/***/ }), + +/***/ 5108: +/***/ ((module, exports, __nccwpck_require__) => { + +/** + * Module dependencies. + */ + +const tty = __nccwpck_require__(2018); +const util = __nccwpck_require__(9023); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = __nccwpck_require__(1450); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? + Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? 
c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = __nccwpck_require__(897)(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + + /***/ }), /***/ 2710: @@ -51350,6 +55822,15373 @@ DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { }; +/***/ }), + +/***/ 9112: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var stream = __nccwpck_require__(6131) +var eos = __nccwpck_require__(1424) +var inherits = __nccwpck_require__(9598) +var shift = __nccwpck_require__(4633) + +var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) + ? Buffer.from([0]) + : new Buffer([0]) + +var onuncork = function(self, fn) { + if (self._corked) self.once('uncork', fn) + else fn() +} + +var autoDestroy = function (self, err) { + if (self._autoDestroy) self.destroy(err) +} + +var destroyer = function(self, end) { + return function(err) { + if (err) autoDestroy(self, err.message === 'premature close' ? 
null : err) + else if (end && !self._ended) self.end() + } +} + +var end = function(ws, fn) { + if (!ws) return fn() + if (ws._writableState && ws._writableState.finished) return fn() + if (ws._writableState) return ws.end(fn) + ws.end() + fn() +} + +var noop = function() {} + +var toStreams2 = function(rs) { + return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) +} + +var Duplexify = function(writable, readable, opts) { + if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) + stream.Duplex.call(this, opts) + + this._writable = null + this._readable = null + this._readable2 = null + + this._autoDestroy = !opts || opts.autoDestroy !== false + this._forwardDestroy = !opts || opts.destroy !== false + this._forwardEnd = !opts || opts.end !== false + this._corked = 1 // start corked + this._ondrain = null + this._drained = false + this._forwarding = false + this._unwrite = null + this._unread = null + this._ended = false + + this.destroyed = false + + if (writable) this.setWritable(writable) + if (readable) this.setReadable(readable) +} + +inherits(Duplexify, stream.Duplex) + +Duplexify.obj = function(writable, readable, opts) { + if (!opts) opts = {} + opts.objectMode = true + opts.highWaterMark = 16 + return new Duplexify(writable, readable, opts) +} + +Duplexify.prototype.cork = function() { + if (++this._corked === 1) this.emit('cork') +} + +Duplexify.prototype.uncork = function() { + if (this._corked && --this._corked === 0) this.emit('uncork') +} + +Duplexify.prototype.setWritable = function(writable) { + if (this._unwrite) this._unwrite() + + if (this.destroyed) { + if (writable && writable.destroy) writable.destroy() + return + } + + if (writable === null || writable === false) { + this.end() + return + } + + var self = this + var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) + + var ondrain = function() { + var ondrain = self._ondrain + self._ondrain = null + if (ondrain) ondrain() + } + + var clear = function() { + self._writable.removeListener('drain', ondrain) + unend() + } + + if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks + + this._writable = writable + this._writable.on('drain', ondrain) + this._unwrite = clear + + this.uncork() // always uncork setWritable +} + +Duplexify.prototype.setReadable = function(readable) { + if (this._unread) this._unread() + + if (this.destroyed) { + if (readable && readable.destroy) readable.destroy() + return + } + + if (readable === null || readable === false) { + this.push(null) + this.resume() + return + } + + var self = this + var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) + + var onreadable = function() { + self._forward() + } + + var onend = function() { + self.push(null) + } + + var clear = function() { + self._readable2.removeListener('readable', onreadable) + self._readable2.removeListener('end', onend) + unend() + } + + this._drained = true + this._readable = readable + this._readable2 = readable._readableState ? 
readable : toStreams2(readable) + this._readable2.on('readable', onreadable) + this._readable2.on('end', onend) + this._unread = clear + + this._forward() +} + +Duplexify.prototype._read = function() { + this._drained = true + this._forward() +} + +Duplexify.prototype._forward = function() { + if (this._forwarding || !this._readable2 || !this._drained) return + this._forwarding = true + + var data + + while (this._drained && (data = shift(this._readable2)) !== null) { + if (this.destroyed) continue + this._drained = this.push(data) + } + + this._forwarding = false +} + +Duplexify.prototype.destroy = function(err, cb) { + if (!cb) cb = noop + if (this.destroyed) return cb(null) + this.destroyed = true + + var self = this + process.nextTick(function() { + self._destroy(err) + cb(null) + }) +} + +Duplexify.prototype._destroy = function(err) { + if (err) { + var ondrain = this._ondrain + this._ondrain = null + if (ondrain) ondrain(err) + else this.emit('error', err) + } + + if (this._forwardDestroy) { + if (this._readable && this._readable.destroy) this._readable.destroy() + if (this._writable && this._writable.destroy) this._writable.destroy() + } + + this.emit('close') +} + +Duplexify.prototype._write = function(data, enc, cb) { + if (this.destroyed) return + if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) + if (data === SIGNAL_FLUSH) return this._finish(cb) + if (!this._writable) return cb() + + if (this._writable.write(data) === false) this._ondrain = cb + else if (!this.destroyed) cb() +} + +Duplexify.prototype._finish = function(cb) { + var self = this + this.emit('preend') + onuncork(this, function() { + end(self._forwardEnd && self._writable, function() { + // haxx to not emit prefinish twice + if (self._writableState.prefinished === false) self._writableState.prefinished = true + self.emit('prefinish') + onuncork(self, cb) + }) + }) +} + +Duplexify.prototype.end = function(data, enc, cb) { + if (typeof data === 'function') return this.end(null, null, data) + if (typeof enc === 'function') return this.end(data, null, enc) + this._ended = true + if (data) this.write(data) + if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) + return stream.Writable.prototype.end.call(this, cb) +} + +module.exports = Duplexify + + +/***/ }), + +/***/ 325: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Buffer = (__nccwpck_require__(3058).Buffer); + +var getParamBytesForAlg = __nccwpck_require__(5028); + +var MAX_OCTET = 0x80, + CLASS_UNIVERSAL = 0, + PRIMITIVE_BIT = 0x20, + TAG_SEQ = 0x10, + TAG_INT = 0x02, + ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), + ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); + +function base64Url(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function signatureAsBuffer(signature) { + if (Buffer.isBuffer(signature)) { + return signature; + } else if ('string' === typeof signature) { + return Buffer.from(signature, 'base64'); + } + + throw new TypeError('ECDSA signature must be a Base64 string or a Buffer'); +} + +function derToJose(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + // the DER encoded param should at most be the param size, plus a padding + // zero, since due to being a signed integer + var maxEncodedParamLength = paramBytes + 1; + + var inputLength = signature.length; + + var offset = 0; + if 
(signature[offset++] !== ENCODED_TAG_SEQ) { + throw new Error('Could not find expected "seq"'); + } + + var seqLength = signature[offset++]; + if (seqLength === (MAX_OCTET | 1)) { + seqLength = signature[offset++]; + } + + if (inputLength - offset < seqLength) { + throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); + } + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "r"'); + } + + var rLength = signature[offset++]; + + if (inputLength - offset - 2 < rLength) { + throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); + } + + if (maxEncodedParamLength < rLength) { + throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var rOffset = offset; + offset += rLength; + + if (signature[offset++] !== ENCODED_TAG_INT) { + throw new Error('Could not find expected "int" for "s"'); + } + + var sLength = signature[offset++]; + + if (inputLength - offset !== sLength) { + throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); + } + + if (maxEncodedParamLength < sLength) { + throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); + } + + var sOffset = offset; + offset += sLength; + + if (offset !== inputLength) { + throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); + } + + var rPadding = paramBytes - rLength, + sPadding = paramBytes - sLength; + + var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); + + for (offset = 0; offset < rPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); + + offset = paramBytes; + + for (var o = offset; offset < o + sPadding; ++offset) { + dst[offset] = 0; + } + signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); + + dst = dst.toString('base64'); + dst = base64Url(dst); + + return dst; +} + +function countPadding(buf, start, stop) { + var padding = 0; + while (start + padding < stop && buf[start + padding] === 0) { + ++padding; + } + + var needsSign = buf[start + padding] >= MAX_OCTET; + if (needsSign) { + --padding; + } + + return padding; +} + +function joseToDer(signature, alg) { + signature = signatureAsBuffer(signature); + var paramBytes = getParamBytesForAlg(alg); + + var signatureBytes = signature.length; + if (signatureBytes !== paramBytes * 2) { + throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); + } + + var rPadding = countPadding(signature, 0, paramBytes); + var sPadding = countPadding(signature, paramBytes, signature.length); + var rLength = paramBytes - rPadding; + var sLength = paramBytes - sPadding; + + var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; + + var shortLength = rsBytes < MAX_OCTET; + + var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); + + var offset = 0; + dst[offset++] = ENCODED_TAG_SEQ; + if (shortLength) { + // Bit 8 has value "0" + // bits 7-1 give the length. + dst[offset++] = rsBytes; + } else { + // Bit 8 of first octet has value "1" + // bits 7-1 give the number of additional length octets. 
+ dst[offset++] = MAX_OCTET | 1; + // length, base 256 + dst[offset++] = rsBytes & 0xff; + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = rLength; + if (rPadding < 0) { + dst[offset++] = 0; + offset += signature.copy(dst, offset, 0, paramBytes); + } else { + offset += signature.copy(dst, offset, rPadding, paramBytes); + } + dst[offset++] = ENCODED_TAG_INT; + dst[offset++] = sLength; + if (sPadding < 0) { + dst[offset++] = 0; + signature.copy(dst, offset, paramBytes); + } else { + signature.copy(dst, offset, paramBytes + sPadding); + } + + return dst; +} + +module.exports = { + derToJose: derToJose, + joseToDer: joseToDer +}; + + +/***/ }), + +/***/ 5028: +/***/ ((module) => { + +"use strict"; + + +function getParamSize(keySize) { + var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); + return result; +} + +var paramBytesForAlg = { + ES256: getParamSize(256), + ES384: getParamSize(384), + ES512: getParamSize(521) +}; + +function getParamBytesForAlg(alg) { + var paramBytes = paramBytesForAlg[alg]; + if (paramBytes) { + return paramBytes; + } + + throw new Error('Unknown algorithm "' + alg + '"'); +} + +module.exports = getParamBytesForAlg; + + +/***/ }), + +/***/ 1424: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var once = __nccwpck_require__(5560); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), + +/***/ 6577: +/***/ ((module, exports) => { + +"use strict"; +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. 
+ */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. 
+ * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. 
+ for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." 
+ ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. 
+ * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. 
+ */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports["default"] = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map + + +/***/ }), + +/***/ 3860: +/***/ ((module) => { + +"use strict"; + + +var hasOwn = Object.prototype.hasOwnProperty; +var toStr = Object.prototype.toString; +var defineProperty = Object.defineProperty; +var gOPD = Object.getOwnPropertyDescriptor; + +var isArray = function isArray(arr) { + if (typeof Array.isArray === 'function') { + return Array.isArray(arr); + } + + return toStr.call(arr) === '[object Array]'; +}; + +var isPlainObject = function isPlainObject(obj) { + if (!obj || toStr.call(obj) !== '[object Object]') { + return false; + } + + var hasOwnConstructor = hasOwn.call(obj, 'constructor'); + var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); + // Not own constructor property must be Object + if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { + return false; + } + + // Own properties are enumerated firstly, so to speed up, + // if last one is own, then all properties are own. 
+ var key; + for (key in obj) { /**/ } + + return typeof key === 'undefined' || hasOwn.call(obj, key); +}; + +// If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target +var setProperty = function setProperty(target, options) { + if (defineProperty && options.name === '__proto__') { + defineProperty(target, options.name, { + enumerable: true, + configurable: true, + value: options.newValue, + writable: true + }); + } else { + target[options.name] = options.newValue; + } +}; + +// Return undefined instead of __proto__ if '__proto__' is not an own property +var getProperty = function getProperty(obj, name) { + if (name === '__proto__') { + if (!hasOwn.call(obj, name)) { + return void 0; + } else if (gOPD) { + // In early versions of node, obj['__proto__'] is buggy when obj has + // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. + return gOPD(obj, name).value; + } + } + + return obj[name]; +}; + +module.exports = function extend() { + var options, name, src, copy, copyIsArray, clone; + var target = arguments[0]; + var i = 1; + var length = arguments.length; + var deep = false; + + // Handle a deep copy situation + if (typeof target === 'boolean') { + deep = target; + target = arguments[1] || {}; + // skip the boolean and the target + i = 2; + } + if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { + target = {}; + } + + for (; i < length; ++i) { + options = arguments[i]; + // Only deal with non-null/undefined values + if (options != null) { + // Extend the base object + for (name in options) { + src = getProperty(target, name); + copy = getProperty(options, name); + + // Prevent never-ending loop + if (target !== copy) { + // Recurse if we're merging plain objects or arrays + if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { + if (copyIsArray) { + copyIsArray = false; + clone = src && isArray(src) ? src : []; + } else { + clone = src && isPlainObject(src) ? 
src : {}; + } + + // Never move original objects, clone them + setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); + + // Don't bring in undefined values + } else if (typeof copy !== 'undefined') { + setProperty(target, { name: name, newValue: copy }); + } + } + } + } + } + + // Return the modified object + return target; +}; + + +/***/ }), + +/***/ 9741: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(9433); +const XMLParser = __nccwpck_require__(9844); +const XMLBuilder = __nccwpck_require__(659); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 812: +/***/ ((module) => { + +function getIgnoreAttributesFn(ignoreAttributes) { + if (typeof ignoreAttributes === 'function') { + return ignoreAttributes + } + if (Array.isArray(ignoreAttributes)) { + return (attrName) => { + for (const pattern of ignoreAttributes) { + if (typeof pattern === 'string' && attrName === pattern) { + return true + } + if (pattern instanceof RegExp && pattern.test(attrName)) { + return true + } + } + } + } + return () => false +} + +module.exports = getIgnoreAttributesFn + +/***/ }), + +/***/ 7019: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. 
+ * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 9433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(7019); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text may presents after self closing tag + } else { + //the result from the nested 
function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs 
= __nccwpck_require__(6378);
+const getIgnoreAttributesFn = __nccwpck_require__(812)
+
+const defaultOptions = {
+  attributeNamePrefix: '@_',
+  attributesGroupName: false,
+  textNodeName: '#text',
+  ignoreAttributes: true,
+  cdataPropName: false,
+  format: false,
+  indentBy: '  ',
+  suppressEmptyNode: false,
+  suppressUnpairedNode: true,
+  suppressBooleanAttributes: true,
+  tagValueProcessor: function(key, a) {
+    return a;
+  },
+  attributeValueProcessor: function(attrName, a) {
+    return a;
+  },
+  preserveOrder: false,
+  commentPropName: false,
+  unpairedTags: [],
+  entities: [
+    { regex: new RegExp("&", "g"), val: "&amp;" },//it must be on top
+    { regex: new RegExp(">", "g"), val: "&gt;" },
+    { regex: new RegExp("<", "g"), val: "&lt;" },
+    { regex: new RegExp("\'", "g"), val: "&apos;" },
+    { regex: new RegExp("\"", "g"), val: "&quot;" }
+  ],
+  processEntities: true,
+  stopNodes: [],
+  // transformTagName: false,
+  // transformAttributeName: false,
+  oneListGroup: false
+};
+
+function Builder(options) {
+  this.options = Object.assign({}, defaultOptions, options);
+  if (this.options.ignoreAttributes === true || this.options.attributesGroupName) {
+    this.isAttribute = function(/*a*/) {
+      return false;
+    };
+  } else {
+    this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes)
+    this.attrPrefixLen = this.options.attributeNamePrefix.length;
+    this.isAttribute = isAttribute;
+  }
+
+  this.processTextOrObjNode = processTextOrObjNode
+
+  if (this.options.format) {
+    this.indentate = indentate;
+    this.tagEndChar = '>\n';
+    this.newLine = '\n';
+  } else {
+    this.indentate = function() {
+      return '';
+    };
+    this.tagEndChar = '>';
+    this.newLine = '';
+  }
+}
+
+Builder.prototype.build = function(jObj) {
+  if(this.options.preserveOrder){
+    return buildFromOrderedJs(jObj, this.options);
+  }else {
+    if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){
+      jObj = {
+        [this.options.arrayNodeName] : jObj
+      }
+    }
+    return this.j2x(jObj, 0, []).val;
+  }
+};
+
+Builder.prototype.j2x = function(jObj, level, ajPath) {
+  let attrStr = '';
+  let val = '';
+  const jPath = ajPath.join('.')
+  for (let key in jObj) {
+    if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue;
+    if (typeof jObj[key] === 'undefined') {
+      // supress undefined node only if it is not an attribute
+      if (this.isAttribute(key)) {
+        val += '';
+      }
+    } else if (jObj[key] === null) {
+      // null attribute should be ignored by the attribute list, but should not cause the tag closing
+      if (this.isAttribute(key)) {
+        val += '';
+      } else if (key === this.options.cdataPropName) {
+        val += '';
+      } else if (key[0] === '?') {
+        val += this.indentate(level) + '<' + key + '?'
+ this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr && !this.ignoreAttributesFn(attr, jPath)) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + } else if (!attr) { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1, ajPath.concat(key)); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level, ajPath) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level, ajPath) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level, ajPath) { + const result = this.j2x(object, level + 1, ajPath.concat(key)); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar;
+    else {
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }
+  }else{
+
+    let tagEndExp = '</' + key + this.tagEndChar;
+    let piClosingChar = "";
+
+    if(key[0] === "?") {
+      piClosingChar = "?";
+      tagEndExp = "";
+    }
+
+    // attrStr is an empty string in case the attribute came as undefined or null
+    if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
+      return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp );
+    } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
+      return this.indentate(level) + `<!--${val}-->` + this.newLine;
+    }else {
+      return (
+        this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
+        val +
+        this.indentate(level) + tagEndExp    );
+    }
+  }
+}
+
+Builder.prototype.closeTag = function(key){
+  let closeTag = "";
+  if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
+    if(!this.options.suppressUnpairedNode) closeTag = "/"
+  }else if(this.options.suppressEmptyNode){ //empty
+    closeTag = "/";
+  }else{
+    closeTag = `></${key}`;
+  }
+  return closeTag;
+}
+
+Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
+  if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
+    return this.indentate(level) + `<![CDATA[${val}]]>` + this.newLine;
+  }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
+    return this.indentate(level) + `<!--${val}-->` + this.newLine;
+  }else if(key[0] === "?") {//PI tag
+    return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
+  }else{
+    let textValue = this.options.tagValueProcessor(key, val);
+    textValue = this.replaceEntitiesValue(textValue);
+
+    if( textValue === ''){
+      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
+    }else{
+      return this.indentate(level) + '<' + key + attrStr + '>' +
+         textValue +
+        '</' + key + this.tagEndChar  }
+  }
+}
+
+Builder.prototype.replaceEntitiesValue = function(textValue){
+  if (textValue && textValue.length > 0 && this.options.processEntities){
+    for (let i=0; i<this.options.entities.length; i++) {
+      const entity = this.options.entities[i];
+      textValue = textValue.replace(entity.regex, entity.val);
+    }
+  }
+  return textValue;
+}
+module.exports = Builder;
+
+
+/***/ }),
+
+/***/ 6378:
+/***/ ((module) => {
+
+const EOL = "\n";
+
+/**
+ *
+ * @param {array} jArray
+ * @param {any} options
+ * @returns
+ */
+function toXml(jArray, options) {
+  let indentation = "";
+  if (options.format && options.indentBy.length > 0) {
+    indentation = EOL;
+  }
+  return arrToStr(jArray, options, "", indentation);
+}
+
+function arrToStr(arr, options, jPath, indentation) {
+  let xmlStr = "";
+  let isPreviousElementTag = false;
+
+  for (let i = 0; i < arr.length; i++) {
+    const tagObj = arr[i];
+    const tagName = propName(tagObj);
+    if(tagName === undefined) continue;
+
+    let newJPath = "";
+    if (jPath.length === 0) newJPath = tagName
+    else newJPath = `${jPath}.${tagName}`;
+
+    if (tagName === options.textNodeName) {
+      let tagText = tagObj[tagName];
+      if (!isStopNode(newJPath, options)) {
+        tagText = options.tagValueProcessor(tagName, tagText);
+        tagText = replaceEntitiesValue(tagText, options);
+      }
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += tagText;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.cdataPropName) {
+      if (isPreviousElementTag) {
+        xmlStr += indentation;
+      }
+      xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
+      isPreviousElementTag = false;
+      continue;
+    } else if (tagName === options.commentPropName) {
+      xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
+      isPreviousElementTag = true;
+      continue;
+    } else if (tagName[0] === "?") {
+      const attStr = attr_to_str(tagObj[":@"], options);
+      const tempInd = tagName === "?xml" ? "" : indentation;
+      let piTextNodeName = tagObj[tagName][0][options.textNodeName];
+      piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
+      xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
+      isPreviousElementTag = true;
+      continue;
+    }
+    let newIdentation = indentation;
+    if (newIdentation !== "") {
+      newIdentation += options.indentBy;
+    }
+    const attStr = attr_to_str(tagObj[":@"], options);
+    const tagStart = indentation + `<${tagName}${attStr}`;
+    const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
+    if (options.unpairedTags.indexOf(tagName) !== -1) {
+      if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
+      else xmlStr += tagStart + "/>";
+    } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
+      xmlStr += tagStart + "/>";
+    } else if (tagValue && tagValue.endsWith(">")) {
+      xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
+    } else {
+      xmlStr += tagStart + ">";
+      if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
+        xmlStr += indentation + options.indentBy + tagValue + indentation;
+      } else {
+        xmlStr += tagValue;
+      }
+      xmlStr += `</${tagName}>`;
+    }
+    isPreviousElementTag = true;
+  }
+
+  return xmlStr;
+}
+
+function propName(obj) {
+  const keys = Object.keys(obj);
+  for (let i = 0; i < keys.length; i++) {
+    const key = keys[i];
+    if(!obj.hasOwnProperty(key)) continue;
+    if (key !== ":@") return key;
+  }
+}
+
+function attr_to_str(attrMap, options) {
+  let attrStr = "";
+  if (attrMap && !options.ignoreAttributes) {
+    for (let attr in attrMap) {
+      if(!attrMap.hasOwnProperty(attr)) continue;
+      let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);
+      attrVal = replaceEntitiesValue(attrVal, options);
+      if (attrVal === true && options.suppressBooleanAttributes) {
+        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;
+      } else {
+        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`;
+      }
+    }
+  }
+  return attrStr;
+}
+
+function isStopNode(jPath, options) {
+  jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);
+  let tagName = jPath.substr(jPath.lastIndexOf(".") + 1);
+  for (let index in options.stopNodes) {
+    if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true;
+  }
+  return false;
+}
+
+function replaceEntitiesValue(textValue, options) {
+  if (textValue && textValue.length > 0 && options.processEntities) {
+    for (let i = 0; i < options.entities.length; i++) {
+      const entity = options.entities[i];
+      textValue = textValue.replace(entity.regex, entity.val);
+    }
+  }
+  return textValue;
+}
+module.exports = toXml;
+
+
+/***/ }),
+
+/***/ 151:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const util = __nccwpck_require__(7019);
+
+//TODO: handle comments
+function readDocType(xmlData, i){
+
+  const entities = {};
+  if( xmlData[i + 3] === 'O' &&
+      xmlData[i + 4] === 'C' &&
+      xmlData[i + 5] === 'T' &&
+      xmlData[i + 6] === 'Y' &&
+      xmlData[i + 7] === 'P' &&
+      xmlData[i + 8] === 'E')
+  {
+    i = i+9;
+    let angleBracketsCount = 1;
+    let hasBody = false, comment = false;
+    let exp = "";
+    for(;i<xmlData.length;i++){
+      if (xmlData[i] === '<' && !comment) { //Determine the tag type
+        if( hasBody && isEntity(xmlData, i)){
+          i += 7;
+          let entityName, val;
+          [entityName, val,i] = readEntityExp(xmlData,i+1);
+          if(val.indexOf("&") === -1) //Parameter entities are not supported
+            entities[ validateEntityName(entityName) ] = {
+              regx : RegExp( `&${entityName};`,"g"),
+              val: val
+            };
+        }
+        else if( hasBody && isElement(xmlData, i)) i += 8;//Not supported
+        else if( hasBody && isAttlist(xmlData, i)) i += 8;//Not supported
+        else if( hasBody && isNotation(xmlData, i)) i += 9;//Not supported
+        else if( isComment) comment = true;
+        else throw new Error("Invalid DOCTYPE");
+
+        angleBracketsCount++;
+        exp = "";
+      } else if (xmlData[i] === '>') { //Read tag content
+        if(comment){
+          if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){
+            comment = false;
+            angleBracketsCount--;
+          }
+        }else{
+          angleBracketsCount--;
+        }
+        if (angleBracketsCount === 0) {
+          break;
+        }
+      }else if( xmlData[i] === '['){
+        hasBody = true;
+      }else{
+        exp += xmlData[i];
+      }
+    }
+    if(angleBracketsCount !== 0){
+      throw new Error(`Unclosed DOCTYPE`);
+    }
+  }else{
+    throw new Error(`Invalid Tag instead of DOCTYPE`);
+  }
+  return {entities, i};
+}
+
+function readEntityExp(xmlData,i){
+  //External entities are not supported
+  //    <!ENTITY ext SYSTEM "http://normal-website.com" >
+
+  //Parameter entities are not supported
+  //    <!ENTITY entityname "&anotherElement;">
+
+  //Internal entities are supported
+  //    <!ENTITY entityname "replacement text">
+
+  //read EntityName
+  let entityName = "";
+  for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) {
+    // if(xmlData[i] === " ") continue;
+    // else
+    entityName += xmlData[i];
+  }
+  entityName = entityName.trim();
+  if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported");
+
+  //read Entity Value
+  const startChar = xmlData[i++];
+  let val = ""
+  for (; i < xmlData.length && xmlData[i] !== startChar ; i++) {
+    val += xmlData[i];
+  }
+  return [entityName, val, i];
+}
+
+function isComment(xmlData, i){
+  if(xmlData[i+1] === '!' &&
+  xmlData[i+2] === '-' &&
+  xmlData[i+3] === '-') return true
+  return false
+}
+function isEntity(xmlData, i){
+  if(xmlData[i+1] === '!' &&
+  xmlData[i+2] === 'E' &&
+  xmlData[i+3] === 'N' &&
+  xmlData[i+4] === 'T' &&
+  xmlData[i+5] === 'I' &&
+  xmlData[i+6] === 'T' &&
+  xmlData[i+7] === 'Y') return true
+  return false
+}
+function isElement(xmlData, i){
+  if(xmlData[i+1] === '!' &&
+  xmlData[i+2] === 'E' &&
+  xmlData[i+3] === 'L' &&
+  xmlData[i+4] === 'E' &&
+  xmlData[i+5] === 'M' &&
+  xmlData[i+6] === 'E' &&
+  xmlData[i+7] === 'N' &&
+  xmlData[i+8] === 'T') return true
+  return false
+}
+
+function isAttlist(xmlData, i){
+  if(xmlData[i+1] === '!' &&
+  xmlData[i+2] === 'A' &&
+  xmlData[i+3] === 'T' &&
+  xmlData[i+4] === 'T' &&
+  xmlData[i+5] === 'L' &&
+  xmlData[i+6] === 'I' &&
+  xmlData[i+7] === 'S' &&
+  xmlData[i+8] === 'T') return true
+  return false
+}
+function isNotation(xmlData, i){
+  if(xmlData[i+1] === '!'
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; + +/***/ }), + +/***/ 3017: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +///@ts-check + +const util = __nccwpck_require__(7019); +const xmlNode = __nccwpck_require__(9307); +const readDocType = __nccwpck_require__(151); +const toNumber = __nccwpck_require__(6496); +const getIgnoreAttributesFn = __nccwpck_require__(812) + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: 
/&(copy|#169);/g, val: "©" }, + "reg" : { regex: /&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + if (this.ignoreAttributesFn(attrName, jPath)) { + continue + } + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag 
scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { + + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = 
currentNode.child.length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } else 
if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 9844: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(4769); +const OrderedObjParser = __nccwpck_require__(3017); +const { prettify} = __nccwpck_require__(7594); +const validator = __nccwpck_require__(9433); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 7594: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; + + +/***/ }), + +/***/ 9307: +/***/ ((module) => { + +"use strict"; + + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; + +/***/ }), + +/***/ 7506: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; +exports.defaultErrorRedactor = defaultErrorRedactor; +const url_1 = __nccwpck_require__(7016); +const util_1 = __nccwpck_require__(3155); +const extend_1 = __importDefault(__nccwpck_require__(3860)); +/** + * Support `instanceof` operator for `GaxiosError`s in different versions of this library. + * + * @see {@link GaxiosError[Symbol.hasInstance]} + */ +exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); +/* eslint-disable-next-line @typescript-eslint/no-explicit-any */ +class GaxiosError extends Error { + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. + * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} + */ + static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { + if (instance && + typeof instance === 'object' && + exports.GAXIOS_ERROR_SYMBOL in instance && + instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { + return true; + } + // fallback to native + return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); + } + constructor(message, config, response, error) { + var _b; + super(message); + this.config = config; + this.response = response; + this.error = error; + /** + * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. 
+ * + * @see {@link GAXIOS_ERROR_SYMBOL} + * @see {@link GaxiosError[Symbol.hasInstance]} + * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} + * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} + * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} + */ + this[_a] = util_1.pkg.version; + // deep-copy config as we do not want to mutate + // the existing config for future retries/use + this.config = (0, extend_1.default)(true, {}, config); + if (this.response) { + this.response.config = (0, extend_1.default)(true, {}, this.response.config); + } + if (this.response) { + try { + this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); + } + catch (_c) { + // best effort - don't throw an error within an error + // we could set `this.response.config.responseType = 'unknown'`, but + // that would mutate future calls with this config object. + } + this.status = this.response.status; + } + if (error && 'code' in error && error.code) { + this.code = error.code; + } + if (config.errorRedactor) { + config.errorRedactor({ + config: this.config, + response: this.response, + }); + } + } +} +exports.GaxiosError = GaxiosError; +function translateData(responseType, data) { + switch (responseType) { + case 'stream': + return data; + case 'json': + return JSON.parse(JSON.stringify(data)); + case 'arraybuffer': + return JSON.parse(Buffer.from(data).toString('utf8')); + case 'blob': + return JSON.parse(data.text()); + default: + return data; + } +} +/** + * An experimental error redactor. + * + * @param config Config to potentially redact properties of + * @param response Config to potentially redact properties of + * + * @experimental + */ +function defaultErrorRedactor(data) { + const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; + function redactHeaders(headers) { + if (!headers) + return; + for (const key of Object.keys(headers)) { + // any casing of `Authentication` + if (/^authentication$/i.test(key)) { + headers[key] = REDACT; + } + // any casing of `Authorization` + if (/^authorization$/i.test(key)) { + headers[key] = REDACT; + } + // anything containing secret, such as 'client secret' + if (/secret/i.test(key)) { + headers[key] = REDACT; + } + } + } + function redactString(obj, key) { + if (typeof obj === 'object' && + obj !== null && + typeof obj[key] === 'string') { + const text = obj[key]; + if (/grant_type=/i.test(text) || + /assertion=/i.test(text) || + /secret/i.test(text)) { + obj[key] = REDACT; + } + } + } + function redactObject(obj) { + if (typeof obj === 'object' && obj !== null) { + if ('grant_type' in obj) { + obj['grant_type'] = REDACT; + } + if ('assertion' in obj) { + obj['assertion'] = REDACT; + } + if ('client_secret' in obj) { + obj['client_secret'] = REDACT; + } + } + } + if (data.config) { + redactHeaders(data.config.headers); + redactString(data.config, 'data'); + redactObject(data.config.data); + redactString(data.config, 'body'); + redactObject(data.config.body); + try { + const url = new url_1.URL('', data.config.url); + if (url.searchParams.has('token')) { + url.searchParams.set('token', REDACT); + } + if (url.searchParams.has('client_secret')) { + url.searchParams.set('client_secret', REDACT); + } + data.config.url = url.toString(); + } + catch (_b) { + // ignore error - no need to parse an invalid 
URL + } + } + if (data.response) { + defaultErrorRedactor({ config: data.response.config }); + redactHeaders(data.response.headers); + redactString(data.response, 'data'); + redactObject(data.response.data); + } + return data; +} +//# sourceMappingURL=common.js.map + +/***/ }), + +/***/ 6010: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Gaxios = void 0; +const extend_1 = __importDefault(__nccwpck_require__(3860)); +const https_1 = __nccwpck_require__(5692); +const node_fetch_1 = __importDefault(__nccwpck_require__(6705)); +const querystring_1 = __importDefault(__nccwpck_require__(3480)); +const is_stream_1 = __importDefault(__nccwpck_require__(6543)); +const url_1 = __nccwpck_require__(7016); +const common_1 = __nccwpck_require__(7506); +const retry_1 = __nccwpck_require__(2789); +const stream_1 = __nccwpck_require__(2203); +const uuid_1 = __nccwpck_require__(3187); +const interceptor_1 = __nccwpck_require__(5608); +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fetch = hasFetch() ? window.fetch : node_fetch_1.default; +function hasWindow() { + return typeof window !== 'undefined' && !!window; +} +function hasFetch() { + return hasWindow() && !!window.fetch; +} +function hasBuffer() { + return typeof Buffer !== 'undefined'; +} +function hasHeader(options, header) { + return !!getHeader(options, header); +} +function getHeader(options, header) { + header = header.toLowerCase(); + for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { + if (header === key.toLowerCase()) { + return options.headers[key]; + } + } + return undefined; +} +class Gaxios { + /** + * The Gaxios class is responsible for making HTTP requests. + * @param defaults The default set of options to be used for this instance. + */ + constructor(defaults) { + _Gaxios_instances.add(this); + this.agentCache = new Map(); + this.defaults = defaults || {}; + this.interceptors = { + request: new interceptor_1.GaxiosInterceptorManager(), + response: new interceptor_1.GaxiosInterceptorManager(), + }; + } + /** + * Perform an HTTP request with the given options. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async request(opts = {}) { + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); + opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); + return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); + } + async _defaultAdapter(opts) { + const fetchImpl = opts.fetchImplementation || fetch; + const res = (await fetchImpl(opts.url, opts)); + const data = await this.getResponseData(opts, res); + return this.translateResponse(opts, res, data); + } + /** + * Internal, retryable version of the `request` method. + * @param opts Set of HTTP options that will be used for this HTTP request. + */ + async _request(opts = {}) { + var _b; + try { + let translatedResponse; + if (opts.adapter) { + translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); + } + else { + translatedResponse = await this._defaultAdapter(opts); + } + if (!opts.validateStatus(translatedResponse.status)) { + if (opts.responseType === 'stream') { + let response = ''; + await new Promise(resolve => { + (translatedResponse === null || translatedResponse === void 0 ? 
void 0 : translatedResponse.data).on('data', chunk => { + response += chunk; + }); + (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); + }); + translatedResponse.data = response; + } + throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); + } + return translatedResponse; + } + catch (e) { + const err = e instanceof common_1.GaxiosError + ? e + : new common_1.GaxiosError(e.message, opts, undefined, e); + const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); + if (shouldRetry && config) { + err.config.retryConfig.currentRetryAttempt = + config.retryConfig.currentRetryAttempt; + // The error's config could be redacted - therefore we only want to + // copy the retry state over to the existing config + opts.retryConfig = (_b = err.config) === null || _b === void 0 ? void 0 : _b.retryConfig; + return this._request(opts); + } + throw err; + } + } + async getResponseData(opts, res) { + switch (opts.responseType) { + case 'stream': + return res.body; + case 'json': { + let data = await res.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + case 'arraybuffer': + return res.arrayBuffer(); + case 'blob': + return res.blob(); + case 'text': + return res.text(); + default: + return this.getResponseDataFromContentType(res); + } + } + /** + * By default, throw for any non-2xx status code + * @param status status code from the HTTP response + */ + validateStatus(status) { + return status >= 200 && status < 300; + } + /** + * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) + * @param params key value pars to encode + */ + paramsSerializer(params) { + return querystring_1.default.stringify(params); + } + translateResponse(opts, res, data) { + // headers need to be converted from a map to an obj + const headers = {}; + res.headers.forEach((value, key) => { + headers[key] = value; + }); + return { + config: opts, + data: data, + headers, + status: res.status, + statusText: res.statusText, + // XMLHttpRequestLike + request: { + responseURL: res.url, + }, + }; + } + /** + * Attempts to parse a response by looking at the Content-Type header. + * @param {FetchResponse} response the HTTP response. + * @returns {Promise} a promise that resolves to the response data. + */ + async getResponseDataFromContentType(response) { + let contentType = response.headers.get('Content-Type'); + if (contentType === null) { + // Maintain existing functionality by calling text() + return response.text(); + } + contentType = contentType.toLowerCase(); + if (contentType.includes('application/json')) { + let data = await response.text(); + try { + data = JSON.parse(data); + } + catch (_b) { + // continue + } + return data; + } + else if (contentType.match(/^text\//)) { + return response.text(); + } + else { + // If the content type is something not easily handled, just return the raw data (blob) + return response.blob(); + } + } + /** + * Creates an async generator that yields the pieces of a multipart/related request body. + * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive + * multipart/related requests are not currently supported. + * + * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. + * @param {string} boundary the boundary string to be placed between each part. 
+ */ + async *getMultipartRequest(multipartOptions, boundary) { + const finale = `--${boundary}--`; + for (const currentPart of multipartOptions) { + const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; + const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; + yield preamble; + if (typeof currentPart.content === 'string') { + yield currentPart.content; + } + else { + yield* currentPart.content; + } + yield '\r\n'; + } + yield finale; + } +} +exports.Gaxios = Gaxios; +_a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { + var _b, _c; + const candidate = new url_1.URL(url); + const noProxyList = [...noProxy]; + const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? void 0 : _c.split(',')) || []; + for (const rule of noProxyEnvList) { + noProxyList.push(rule.trim()); + } + for (const rule of noProxyList) { + // Match regex + if (rule instanceof RegExp) { + if (rule.test(candidate.toString())) { + return false; + } + } + // Match URL + else if (rule instanceof url_1.URL) { + if (rule.origin === candidate.origin) { + return false; + } + } + // Match string regex + else if (rule.startsWith('*.') || rule.startsWith('.')) { + const cleanedRule = rule.replace(/^\*\./, '.'); + if (candidate.hostname.endsWith(cleanedRule)) { + return false; + } + } + // Basic string match + else if (rule === candidate.origin || + rule === candidate.hostname || + rule === candidate.href) { + return false; + } + } + return true; +}, _Gaxios_applyRequestInterceptors = +/** + * Applies the request interceptors. The request interceptors are applied after the + * call to prepareRequest is completed. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyRequestInterceptors(options) { + let promiseChain = Promise.resolve(options); + for (const interceptor of this.interceptors.request.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_applyResponseInterceptors = +/** + * Applies the response interceptors. The response interceptors are applied after the + * call to request is made. + * + * @param {GaxiosOptions} options The current set of options. + * + * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. + */ +async function _Gaxios_applyResponseInterceptors(response) { + let promiseChain = Promise.resolve(response); + for (const interceptor of this.interceptors.response.values()) { + if (interceptor) { + promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); + } + } + return promiseChain; +}, _Gaxios_prepareRequest = +/** + * Validates the options, merges them with defaults, and prepare request. + * + * @param options The original options passed from the client. 
+ * @returns Prepared options, ready to make a request + */ +async function _Gaxios_prepareRequest(options) { + var _b, _c, _d, _e; + const opts = (0, extend_1.default)(true, {}, this.defaults, options); + if (!opts.url) { + throw new Error('URL is required.'); + } + // baseUrl has been deprecated, remove in 2.0 + const baseUrl = opts.baseUrl || opts.baseURL; + if (baseUrl) { + opts.url = baseUrl.toString() + opts.url; + } + opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; + if (opts.params && Object.keys(opts.params).length > 0) { + let additionalQueryParams = opts.paramsSerializer(opts.params); + if (additionalQueryParams.startsWith('?')) { + additionalQueryParams = additionalQueryParams.slice(1); + } + const prefix = opts.url.toString().includes('?') ? '&' : '?'; + opts.url = opts.url + prefix + additionalQueryParams; + } + if (typeof options.maxContentLength === 'number') { + opts.size = options.maxContentLength; + } + if (typeof options.maxRedirects === 'number') { + opts.follow = options.maxRedirects; + } + opts.headers = opts.headers || {}; + if (opts.multipart === undefined && opts.data) { + const isFormData = typeof FormData === 'undefined' + ? false + : (opts === null || opts === void 0 ? void 0 : opts.data) instanceof FormData; + if (is_stream_1.default.readable(opts.data)) { + opts.body = opts.data; + } + else if (hasBuffer() && Buffer.isBuffer(opts.data)) { + // Do not attempt to JSON.stringify() a Buffer: + opts.body = opts.data; + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + } + else if (typeof opts.data === 'object') { + // If www-form-urlencoded content type has been set, but data is + // provided as an object, serialize the content using querystring: + if (!isFormData) { + if (getHeader(opts, 'content-type') === + 'application/x-www-form-urlencoded') { + opts.body = opts.paramsSerializer(opts.data); + } + else { + // } else if (!(opts.data instanceof FormData)) { + if (!hasHeader(opts, 'Content-Type')) { + opts.headers['Content-Type'] = 'application/json'; + } + opts.body = JSON.stringify(opts.data); + } + } + } + else { + opts.body = opts.data; + } + } + else if (opts.multipart && opts.multipart.length > 0) { + // note: once the minimum version reaches Node 16, + // this can be replaced with randomUUID() function from crypto + // and the dependency on UUID removed + const boundary = (0, uuid_1.v4)(); + opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + const bodyStream = new stream_1.PassThrough(); + opts.body = bodyStream; + (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); + } + opts.validateStatus = opts.validateStatus || this.validateStatus; + opts.responseType = opts.responseType || 'unknown'; + if (!opts.headers['Accept'] && opts.responseType === 'json') { + opts.headers['Accept'] = 'application/json'; + } + opts.method = opts.method || 'GET'; + const proxy = opts.proxy || + ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || + ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || + ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || + ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? 
void 0 : _e.http_proxy); + const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); + if (opts.agent) { + // don't do any of the following options - use the user-provided agent. + } + else if (proxy && urlMayUseProxy) { + const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); + if (this.agentCache.has(proxy)) { + opts.agent = this.agentCache.get(proxy); + } + else { + opts.agent = new HttpsProxyAgent(proxy, { + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(proxy, opts.agent); + } + } + else if (opts.cert && opts.key) { + // Configure client for mTLS + if (this.agentCache.has(opts.key)) { + opts.agent = this.agentCache.get(opts.key); + } + else { + opts.agent = new https_1.Agent({ + cert: opts.cert, + key: opts.key, + }); + this.agentCache.set(opts.key, opts.agent); + } + } + if (typeof opts.errorRedactor !== 'function' && + opts.errorRedactor !== false) { + opts.errorRedactor = common_1.defaultErrorRedactor; + } + return opts; +}, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { + __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(3669)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); + return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); +}; +/** + * A cache for the lazily-loaded proxy agent. + * + * Should use {@link Gaxios[#getProxyAgent]} to retrieve. + */ +// using `import` to dynamically import the types here +_Gaxios_proxyAgent = { value: void 0 }; +//# sourceMappingURL=gaxios.js.map + +/***/ }), + +/***/ 7003: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.instance = exports.Gaxios = exports.GaxiosError = void 0; +exports.request = request; +const gaxios_1 = __nccwpck_require__(6010); +Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); +var common_1 = __nccwpck_require__(7506); +Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); +__exportStar(__nccwpck_require__(5608), exports); +/** + * The default instance used when the `request` method is directly + * invoked. + */ +exports.instance = new gaxios_1.Gaxios(); +/** + * Make an HTTP request using the given options. + * @param opts Options for the request + */ +async function request(opts) { + return exports.instance.request(opts); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 5608: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GaxiosInterceptorManager = void 0; +/** + * Class to manage collections of GaxiosInterceptors for both requests and responses. + */ +class GaxiosInterceptorManager extends Set { +} +exports.GaxiosInterceptorManager = GaxiosInterceptorManager; +//# sourceMappingURL=interceptor.js.map + +/***/ }), + +/***/ 2789: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2018 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRetryConfig = getRetryConfig; +async function getRetryConfig(err) { + let config = getConfig(err); + if (!err || !err.config || (!config && !err.config.retry)) { + return { shouldRetry: false }; + } + config = config || {}; + config.currentRetryAttempt = config.currentRetryAttempt || 0; + config.retry = + config.retry === undefined || config.retry === null ? 
3 : config.retry; + config.httpMethodsToRetry = config.httpMethodsToRetry || [ + 'GET', + 'HEAD', + 'PUT', + 'OPTIONS', + 'DELETE', + ]; + config.noResponseRetries = + config.noResponseRetries === undefined || config.noResponseRetries === null + ? 2 + : config.noResponseRetries; + config.retryDelayMultiplier = config.retryDelayMultiplier + ? config.retryDelayMultiplier + : 2; + config.timeOfFirstRequest = config.timeOfFirstRequest + ? config.timeOfFirstRequest + : Date.now(); + config.totalTimeout = config.totalTimeout + ? config.totalTimeout + : Number.MAX_SAFE_INTEGER; + config.maxRetryDelay = config.maxRetryDelay + ? config.maxRetryDelay + : Number.MAX_SAFE_INTEGER; + // If this wasn't in the list of status codes where we want + // to automatically retry, return. + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 408 - Retry ("Request Timeout") + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [408, 408], + [429, 429], + [500, 599], + ]; + config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; + // Put the config back into the err + err.config.retryConfig = config; + // Determine if we should retry the request + const shouldRetryFn = config.shouldRetry || shouldRetryRequest; + if (!(await shouldRetryFn(err))) { + return { shouldRetry: false, config: err.config }; + } + const delay = getNextRetryDelay(config); + // We're going to retry! Incremenent the counter. + err.config.retryConfig.currentRetryAttempt += 1; + // Create a promise that invokes the retry after the backOffDelay + const backoff = config.retryBackoff + ? config.retryBackoff(err, delay) + : new Promise(resolve => { + setTimeout(resolve, delay); + }); + // Notify the user if they added an `onRetryAttempt` handler + if (config.onRetryAttempt) { + config.onRetryAttempt(err); + } + // Return the promise in which recalls Gaxios to retry the request + await backoff; + return { shouldRetry: true, config: err.config }; +} +/** + * Determine based on config if we should retry the request. + * @param err The GaxiosError passed to the interceptor. + */ +function shouldRetryRequest(err) { + var _a; + const config = getConfig(err); + // node-fetch raises an AbortError if signaled: + // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal + if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { + return false; + } + // If there's no config, or retries are disabled, return. + if (!config || config.retry === 0) { + return false; + } + // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) + if (!err.response && + (config.currentRetryAttempt || 0) >= config.noResponseRetries) { + return false; + } + // Only retry with configured HttpMethods. + if (!err.config.method || + config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { + return false; + } + // If this wasn't in the list of status codes where we want + // to automatically retry, return. 
+ if (err.response && err.response.status) { + let isInRange = false; + for (const [min, max] of config.statusCodesToRetry) { + const status = err.response.status; + if (status >= min && status <= max) { + isInRange = true; + break; + } + } + if (!isInRange) { + return false; + } + } + // If we are out of retry attempts, return + config.currentRetryAttempt = config.currentRetryAttempt || 0; + if (config.currentRetryAttempt >= config.retry) { + return false; + } + return true; +} +/** + * Acquire the raxConfig object from an GaxiosError if available. + * @param err The Gaxios error with a config object. + */ +function getConfig(err) { + if (err && err.config && err.config.retryConfig) { + return err.config.retryConfig; + } + return; +} +/** + * Gets the delay to wait before the next retry. + * + * @param {RetryConfig} config The current set of retry options + * @returns {number} the amount of ms to wait before the next retry attempt. + */ +function getNextRetryDelay(config) { + var _a; + // Calculate time to wait with exponential backoff. + // If this is the first retry, look for a configured retryDelay. + const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; + // Formula: retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1 / 2) * 1000) + const calculatedDelay = retryDelay + + ((Math.pow(config.retryDelayMultiplier, config.currentRetryAttempt) - 1) / + 2) * + 1000; + const maxAllowableDelay = config.totalTimeout - (Date.now() - config.timeOfFirstRequest); + return Math.min(calculatedDelay, maxAllowableDelay, config.maxRetryDelay); +} +//# sourceMappingURL=retry.js.map + +/***/ }), + +/***/ 3155: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.pkg = void 0; +exports.pkg = __nccwpck_require__(6495); +//# sourceMappingURL=util.js.map + +/***/ }), + +/***/ 3187: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6698)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(6272)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(8485)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(742)); + +var _nil = _interopRequireDefault(__nccwpck_require__(8212)); + +var _version = _interopRequireDefault(__nccwpck_require__(6651)); + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(4386)); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 1979: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 3968: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 8212: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 9884: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 956: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 4566: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5398: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 4386: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6698: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6272: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _md = _interopRequireDefault(__nccwpck_require__(1979)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2781: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(4386); + +var _parse = _interopRequireDefault(__nccwpck_require__(9884)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 8485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(3968)); + +var _rng = _interopRequireDefault(__nccwpck_require__(4566)); + +var _stringify = __nccwpck_require__(4386); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 742: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2781)); + +var _sha = _interopRequireDefault(__nccwpck_require__(5398)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 5913: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(956)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6651: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(5913)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 381: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCE_LINUX_BIOS_PATHS = void 0; +exports.isGoogleCloudServerless = isGoogleCloudServerless; +exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; +exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; +exports.isGoogleComputeEngine = isGoogleComputeEngine; +exports.detectGCPResidency = detectGCPResidency; +const fs_1 = __nccwpck_require__(9896); +const os_1 = __nccwpck_require__(857); +/** + * Known paths unique to Google Compute Engine Linux instances + */ +exports.GCE_LINUX_BIOS_PATHS = { + BIOS_DATE: '/sys/class/dmi/id/bios_date', + BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', +}; +const GCE_MAC_ADDRESS_REGEX = /^42:01/; +/** + * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). + * + * Uses the: + * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. + * + * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. + */ +function isGoogleCloudServerless() { + /** + * `CLOUD_RUN_JOB` is used for Cloud Run Jobs + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * + * `FUNCTION_NAME` is used in older Cloud Functions environments: + * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. + * + * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: + * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. + * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. + */ + const isGFEnvironment = process.env.CLOUD_RUN_JOB || + process.env.FUNCTION_NAME || + process.env.K_SERVICE; + return !!isGFEnvironment; +} +/** + * Determines if the process is running on a Linux Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. + */ +function isGoogleComputeEngineLinux() { + if ((0, os_1.platform)() !== 'linux') + return false; + try { + // ensure this file exist + (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); + // ensure this file exist and matches + const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); + return /Google/.test(biosVendor); + } + catch (_a) { + return false; + } +} +/** + * Determines if the process is running on a Google Compute Engine instance with a known + * MAC address. + * + * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. + */ +function isGoogleComputeEngineMACAddress() { + const interfaces = (0, os_1.networkInterfaces)(); + for (const item of Object.values(interfaces)) { + if (!item) + continue; + for (const { mac } of item) { + if (GCE_MAC_ADDRESS_REGEX.test(mac)) { + return true; + } + } + } + return false; +} +/** + * Determines if the process is running on a Google Compute Engine instance. + * + * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. + */ +function isGoogleComputeEngine() { + return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); +} +/** + * Determines if the process is running on Google Cloud Platform. 
+ * + * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. + */ +function detectGCPResidency() { + return isGoogleCloudServerless() || isGoogleComputeEngine(); +} +//# sourceMappingURL=gcp-residency.js.map + +/***/ }), + +/***/ 3046: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.gcpResidencyCache = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; +exports.instance = instance; +exports.project = project; +exports.universe = universe; +exports.bulk = bulk; +exports.isAvailable = isAvailable; +exports.resetIsAvailableCache = resetIsAvailableCache; +exports.getGCPResidency = getGCPResidency; +exports.setGCPResidency = setGCPResidency; +exports.requestTimeout = requestTimeout; +const gaxios_1 = __nccwpck_require__(7003); +const jsonBigint = __nccwpck_require__(4826); +const gcp_residency_1 = __nccwpck_require__(381); +const logger = __nccwpck_require__(1577); +exports.BASE_PATH = '/computeMetadata/v1'; +exports.HOST_ADDRESS = 'http://169.254.169.254'; +exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; +exports.HEADER_NAME = 'Metadata-Flavor'; +exports.HEADER_VALUE = 'Google'; +exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); +const log = logger.log('gcp metadata'); +/** + * Metadata server detection override options. + * + * Available via `process.env.METADATA_SERVER_DETECTION`. + */ +exports.METADATA_SERVER_DETECTION = Object.freeze({ + 'assume-present': "don't try to ping the metadata server, but assume it's present", + none: "don't try to ping the metadata server, but don't try to use it either", + 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", + 'ping-only': 'skip the BIOS probe, and go straight to pinging', +}); +/** + * Returns the base URL while taking into account the GCE_METADATA_HOST + * environment variable if it exists. + * + * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. 
+ */ +function getBaseUrl(baseUrl) { + if (!baseUrl) { + baseUrl = + process.env.GCE_METADATA_IP || + process.env.GCE_METADATA_HOST || + exports.HOST_ADDRESS; + } + // If no scheme is provided default to HTTP: + if (!/^https?:\/\//.test(baseUrl)) { + baseUrl = `http://${baseUrl}`; + } + return new URL(exports.BASE_PATH, baseUrl).href; +} +// Accepts an options object passed from the user to the API. In previous +// versions of the API, it referred to a `Request` or an `Axios` request +// options object. Now it refers to an object with very limited property +// names. This is here to help ensure users don't pass invalid options when +// they upgrade from 0.4 to 0.5 to 0.8. +function validate(options) { + Object.keys(options).forEach(key => { + switch (key) { + case 'params': + case 'property': + case 'headers': + break; + case 'qs': + throw new Error("'qs' is not a valid configuration option. Please use 'params' instead."); + default: + throw new Error(`'${key}' is not a valid configuration option.`); + } + }); +} +async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { + let metadataKey = ''; + let params = {}; + let headers = {}; + if (typeof type === 'object') { + const metadataAccessor = type; + metadataKey = metadataAccessor.metadataKey; + params = metadataAccessor.params || params; + headers = metadataAccessor.headers || headers; + noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; + fastFail = metadataAccessor.fastFail || fastFail; + } + else { + metadataKey = type; + } + if (typeof options === 'string') { + metadataKey += `/${options}`; + } + else { + validate(options); + if (options.property) { + metadataKey += `/${options.property}`; + } + headers = options.headers || headers; + params = options.params || params; + } + const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; + const req = { + url: `${getBaseUrl()}/${metadataKey}`, + headers: { ...exports.HEADERS, ...headers }, + retryConfig: { noResponseRetries }, + params, + responseType: 'text', + timeout: requestTimeout(), + }; + log.info('instance request %j', req); + const res = await requestMethod(req); + log.info('instance metadata is %s', res.data); + // NOTE: node.js converts all incoming headers to lower case. + if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { + throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header. Expected '${exports.HEADER_VALUE}', got ${res.headers[exports.HEADER_NAME.toLowerCase()] ? `'${res.headers[exports.HEADER_NAME.toLowerCase()]}'` : 'no header'}`); + } + if (typeof res.data === 'string') { + try { + return jsonBigint.parse(res.data); + } + catch (_a) { + /* ignore */ + } + } + return res.data; +} +async function fastFailMetadataRequest(options) { + var _a; + const secondaryOptions = { + ...options, + url: (_a = options.url) === null || _a === void 0 ? void 0 : _a.toString().replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), + }; + // We race a connection between DNS/IP to metadata server. There are a couple + // reasons for this: + // + // 1. the DNS is slow in some GCP environments; by checking both, we might + // detect the runtime environment signficantly faster. + // 2. we can't just check the IP, which is tarpitted and slow to respond + // on a user's local machine. 
+ // + // Additional logic has been added to make sure that we don't create an + // unhandled rejection in scenarios where a failure happens sometime + // after a success. + // + // Note, however, if a failure happens prior to a success, a rejection should + // occur, this is for folks running locally. + // + let responded = false; + const r1 = (0, gaxios_1.request)(options) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r2; + } + else { + responded = true; + throw err; + } + }); + const r2 = (0, gaxios_1.request)(secondaryOptions) + .then(res => { + responded = true; + return res; + }) + .catch(err => { + if (responded) { + return r1; + } + else { + responded = true; + throw err; + } + }); + return Promise.race([r1, r2]); +} +/** + * Obtain metadata for the current GCE instance. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const serviceAccount: {} = await instance('service-accounts/'); + * const serviceAccountEmail: string = await instance('service-accounts/default/email'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function instance(options) { + return metadataAccessor('instance', options); +} +/** + * Obtain metadata for the current GCP project. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const projectId: string = await project('project-id'); + * const numericProjectId: number = await project('numeric-project-id'); + * ``` + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function project(options) { + return metadataAccessor('project', options); +} +/** + * Obtain metadata for the current universe. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const universeDomain: string = await universe('universe-domain'); + * ``` + */ +function universe(options) { + return metadataAccessor('universe', options); +} +/** + * Retrieve metadata items in parallel. + * + * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} + * + * @example + * ``` + * const data = await bulk([ + * { + * metadataKey: 'instance', + * }, + * { + * metadataKey: 'project/project-id', + * }, + * ] as const); + * + * // data.instance; + * // data['project/project-id']; + * ``` + * + * @param properties The metadata properties to retrieve + * @returns The metadata in `metadatakey:value` format + */ +async function bulk(properties) { + const r = {}; + await Promise.all(properties.map(item => { + return (async () => { + const res = await metadataAccessor(item); + const key = item.metadataKey; + r[key] = res; + })(); + })); + return r; +} +/* + * How many times should we retry detecting GCP environment. + */ +function detectGCPAvailableRetries() { + return process.env.DETECT_GCP_RETRIES + ? Number(process.env.DETECT_GCP_RETRIES) + : 0; +} +let cachedIsAvailableResponse; +/** + * Determine if the metadata server is currently available. + */ +async function isAvailable() { + if (process.env.METADATA_SERVER_DETECTION) { + const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); + if (!(value in exports.METADATA_SERVER_DETECTION)) { + throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); + } + switch (value) { + case 'assume-present': + return true; + case 'none': + return false; + case 'bios-only': + return getGCPResidency(); + case 'ping-only': + // continue, we want to ping the server + } + } + try { + // If a user is instantiating several GCP libraries at the same time, + // this may result in multiple calls to isAvailable(), to detect the + // runtime environment. We use the same promise for each of these calls + // to reduce the network load. + if (cachedIsAvailableResponse === undefined) { + cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), + // If the default HOST_ADDRESS has been overridden, we should not + // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in + // a non-GCP environment): + !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); + } + await cachedIsAvailableResponse; + return true; + } + catch (e) { + const err = e; + if (process.env.DEBUG_AUTH) { + console.info(err); + } + if (err.type === 'request-timeout') { + // If running in a GCP environment, metadata endpoint should return + // within ms. + return false; + } + if (err.response && err.response.status === 404) { + return false; + } + else { + if (!(err.response && err.response.status === 404) && + // A warning is emitted if we see an unexpected err.code, or err.code + // is not populated: + (!err.code || + ![ + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'ENETUNREACH', + 'ENOENT', + 'ENOTFOUND', + 'ECONNREFUSED', + ].includes(err.code))) { + let code = 'UNKNOWN'; + if (err.code) + code = err.code; + process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); + } + // Failure to resolve the metadata service means that it is not available. + return false; + } + } +} +/** + * reset the memoized isAvailable() lookup. + */ +function resetIsAvailableCache() { + cachedIsAvailableResponse = undefined; +} +/** + * A cache for the detected GCP Residency. + */ +exports.gcpResidencyCache = null; +/** + * Detects GCP Residency. + * Caches results to reduce costs for subsequent calls. + * + * @see setGCPResidency for setting + */ +function getGCPResidency() { + if (exports.gcpResidencyCache === null) { + setGCPResidency(); + } + return exports.gcpResidencyCache; +} +/** + * Sets the detected GCP Residency. + * Useful for forcing metadata server detection behavior. + * + * Set `null` to autodetect the environment (default behavior). + * @see getGCPResidency for getting + */ +function setGCPResidency(value = null) { + exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); +} +/** + * Obtain the timeout for requests to the metadata server. + * + * In certain environments and conditions requests can take longer than + * the default timeout to complete. This function will determine the + * appropriate timeout based on the environment. + * + * @returns {number} a request timeout duration in milliseconds. + */ +function requestTimeout() { + return getGCPResidency() ? 0 : 3000; +} +__exportStar(__nccwpck_require__(381), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2012 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; +const events_1 = __nccwpck_require__(4434); +const gaxios_1 = __nccwpck_require__(7003); +const transporters_1 = __nccwpck_require__(7633); +const util_1 = __nccwpck_require__(7870); +/** + * The default cloud universe + * + * @see {@link AuthJSONOptions.universe_domain} + */ +exports.DEFAULT_UNIVERSE = 'googleapis.com'; +/** + * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} + */ +exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; +class AuthClient extends events_1.EventEmitter { + constructor(opts = {}) { + var _a, _b, _c, _d, _e; + super(); + this.credentials = {}; + this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; + this.forceRefreshOnFailure = false; + this.universeDomain = exports.DEFAULT_UNIVERSE; + const options = (0, util_1.originalOrCamelOptions)(opts); + // Shared auth options + this.apiKey = opts.apiKey; + this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; + this.quotaProjectId = options.get('quota_project_id'); + this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; + this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; + // Shared client options + this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); + if (opts.transporterOptions) { + this.transporter.defaults = opts.transporterOptions; + } + if (opts.eagerRefreshThresholdMillis) { + this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; + } + this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; + } + /** + * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. + * + * @expiremental + */ + get gaxios() { + if (this.transporter instanceof gaxios_1.Gaxios) { + return this.transporter; + } + else if (this.transporter instanceof transporters_1.DefaultTransporter) { + return this.transporter.instance; + } + else if ('instance' in this.transporter && + this.transporter.instance instanceof gaxios_1.Gaxios) { + return this.transporter.instance; + } + return null; + } + /** + * Sets the auth credentials. + */ + setCredentials(credentials) { + this.credentials = credentials; + } + /** + * Append additional headers, e.g., x-goog-user-project, shared across the + * classes inheriting AuthClient. This method should be used by any method + * that overrides getRequestMetadataAsync(), which is a shared helper for + * setting request information in both gRPC and HTTP API calls. + * + * @param headers object to append additional headers to. 
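+     *
+     * @example
+     * A rough sketch (the client and header values here are hypothetical, not
+     * part of this module):
+     * ```
+     * // `client` is any AuthClient subclass created with
+     * // { quotaProjectId: 'my-billing-project' }
+     * const headers = client.addSharedMetadataHeaders({
+     *   Authorization: 'Bearer <access_token>',
+     * });
+     * // headers['x-goog-user-project'] === 'my-billing-project'
+     * ```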
+ */ + addSharedMetadataHeaders(headers) { + // quota_project_id, stored in application_default_credentials.json, is set in + // the x-goog-user-project header, to indicate an alternate account for + // billing and quota: + if (!headers['x-goog-user-project'] && // don't override a value the user sets. + this.quotaProjectId) { + headers['x-goog-user-project'] = this.quotaProjectId; + } + return headers; + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. + */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.AuthClient = AuthClient; + + +/***/ }), + +/***/ 1261: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsClient = void 0; +const awsrequestsigner_1 = __nccwpck_require__(7647); +const baseexternalclient_1 = __nccwpck_require__(142); +const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(9157); +const util_1 = __nccwpck_require__(7870); +/** + * AWS external account client. This is used for AWS workloads, where + * AWS STS GetCallerIdentity serialized signed requests are exchanged for + * GCP access token. + */ +class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates an AwsClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid AWS credential. + * @param options The external account options object typically loaded + * from the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. 
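+     *
+     * @example
+     * A minimal sketch, assuming a workload identity pool configured for AWS;
+     * the audience, URLs and pool names below are placeholders:
+     * ```
+     * const {AwsClient} = require('google-auth-library');
+     *
+     * const client = new AwsClient({
+     *   type: 'external_account',
+     *   audience: '//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/my-pool/providers/my-aws-provider',
+     *   subject_token_type: 'urn:ietf:params:aws:token-type:aws4_request',
+     *   token_url: 'https://sts.googleapis.com/v1/token',
+     *   credential_source: {
+     *     environment_id: 'aws1',
+     *     region_url: 'http://169.254.169.254/latest/meta-data/placement/availability-zone',
+     *     url: 'http://169.254.169.254/latest/meta-data/iam/security-credentials',
+     *     regional_cred_verification_url: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15',
+     *   },
+     * });
+     * const {token} = await client.getAccessToken();
+     * ```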
+ */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !awsSecurityCredentialsSupplier) { + throw new Error('A credential source or AWS security credentials supplier must be specified.'); + } + if (credentialSource && awsSecurityCredentialsSupplier) { + throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); + } + if (awsSecurityCredentialsSupplier) { + this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; + this.regionalCredVerificationUrl = + __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + this.environmentId = credentialSourceOpts.get('environment_id'); + // This is only required if the AWS region is not available in the + // AWS_REGION or AWS_DEFAULT_REGION environment variables. + const regionUrl = credentialSourceOpts.get('region_url'); + // This is only required if AWS security credentials are not available in + // environment variables. + const securityCredentialsUrl = credentialSourceOpts.get('url'); + const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); + this.awsSecurityCredentialsSupplier = + new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ + regionUrl: regionUrl, + securityCredentialsUrl: securityCredentialsUrl, + imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, + }); + this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); + this.credentialSourceType = 'aws'; + // Data validators. + this.validateEnvironmentId(); + } + this.awsRequestSigner = null; + this.region = ''; + } + validateEnvironmentId() { + var _b; + const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); + if (!match || !this.regionalCredVerificationUrl) { + throw new Error('No valid AWS "credential_source" provided'); + } + else if (parseInt(match[2], 10) !== 1) { + throw new Error(`aws version "${match[2]}" is not supported in the current build.`); + } + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. This will call the + * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS + * Security Credentials, then use them to create a signed AWS STS request that + * can be exchanged for a GCP access token. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Initialize AWS request signer if not already initialized. + if (!this.awsRequestSigner) { + this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); + this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { + return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); + }, this.region); + } + // Generate signed request to AWS STS GetCallerIdentity API. + // Use the required regional endpoint. Otherwise, the request will fail. 
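+        // (For example, with region "us-east-2" the default verification URL template
+        // "https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15"
+        // resolves to "https://sts.us-east-2.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15".)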
+ const options = await this.awsRequestSigner.getRequestOptions({ + ..._a.RETRY_CONFIG, + url: this.regionalCredVerificationUrl.replace('{region}', this.region), + method: 'POST', + }); + // The GCP STS endpoint expects the headers to be formatted as: + // [ + // {key: 'x-amz-date', value: '...'}, + // {key: 'Authorization', value: '...'}, + // ... + // ] + // And then serialized as: + // encodeURIComponent(JSON.stringify({ + // url: '...', + // method: 'POST', + // headers: [{key: 'x-amz-date', value: '...'}, ...] + // })) + const reformattedHeader = []; + const extendedHeaders = Object.assign({ + // The full, canonical resource name of the workload identity pool + // provider, with or without the HTTPS prefix. + // Including this header as part of the signature is recommended to + // ensure data integrity. + 'x-goog-cloud-target-resource': this.audience, + }, options.headers); + // Reformat header to GCP STS expected format. + for (const key in extendedHeaders) { + reformattedHeader.push({ + key, + value: extendedHeaders[key], + }); + } + // Serialize the reformatted signed request. + return encodeURIComponent(JSON.stringify({ + url: options.url, + method: options.method, + headers: reformattedHeader, + })); + } +} +exports.AwsClient = AwsClient; +_a = AwsClient; +_AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; +/** + * @deprecated AWS client no validates the EC2 metadata address. + **/ +AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; + + +/***/ }), + +/***/ 7647: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AwsRequestSigner = void 0; +const crypto_1 = __nccwpck_require__(8851); +/** AWS Signature Version 4 signing algorithm identifier. */ +const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; +/** + * The termination string for the AWS credential scope value as defined in + * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + */ +const AWS_REQUEST_TYPE = 'aws4_request'; +/** + * Implements an AWS API request signer based on the AWS Signature Version 4 + * signing process. + * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + */ +class AwsRequestSigner { + /** + * Instantiates an AWS API request signer used to send authenticated signed + * requests to AWS APIs based on the AWS Signature Version 4 signing process. + * This also provides a mechanism to generate the signed request without + * sending it. + * @param getCredentials A mechanism to retrieve AWS security credentials + * when needed. + * @param region The AWS region to use. 
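+     *
+     * @example
+     * A rough sketch; the credential values and region are placeholders:
+     * ```
+     * const signer = new AwsRequestSigner(async () => ({
+     *   accessKeyId: 'AKIA...',
+     *   secretAccessKey: '...',
+     *   token: '...', // optional session token
+     * }), 'us-east-2');
+     *
+     * const signedRequest = await signer.getRequestOptions({
+     *   url: 'https://sts.us-east-2.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15',
+     *   method: 'POST',
+     * });
+     * // signedRequest.headers now include 'Authorization' and 'x-amz-date'.
+     * ```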
+ */ + constructor(getCredentials, region) { + this.getCredentials = getCredentials; + this.region = region; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Generates the signed request for the provided HTTP request for calling + * an AWS API. This follows the steps described at: + * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html + * @param amzOptions The AWS request options that need to be signed. + * @return A promise that resolves with the GaxiosOptions containing the + * signed HTTP request parameters. + */ + async getRequestOptions(amzOptions) { + if (!amzOptions.url) { + throw new Error('"url" is required in "amzOptions"'); + } + // Stringify JSON requests. This will be set in the request body of the + // generated signed request. + const requestPayloadData = typeof amzOptions.data === 'object' + ? JSON.stringify(amzOptions.data) + : amzOptions.data; + const url = amzOptions.url; + const method = amzOptions.method || 'GET'; + const requestPayload = amzOptions.body || requestPayloadData; + const additionalAmzHeaders = amzOptions.headers; + const awsSecurityCredentials = await this.getCredentials(); + const uri = new URL(url); + const headerMap = await generateAuthenticationHeaderMap({ + crypto: this.crypto, + host: uri.host, + canonicalUri: uri.pathname, + canonicalQuerystring: uri.search.substr(1), + method, + region: this.region, + securityCredentials: awsSecurityCredentials, + requestPayload, + additionalAmzHeaders, + }); + // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. + const headers = Object.assign( + // Add x-amz-date if available. + headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { + Authorization: headerMap.authorizationHeader, + host: uri.host, + }, additionalAmzHeaders || {}); + if (awsSecurityCredentials.token) { + Object.assign(headers, { + 'x-amz-security-token': awsSecurityCredentials.token, + }); + } + const awsSignedReq = { + url, + method: method, + headers, + }; + if (typeof requestPayload !== 'undefined') { + awsSignedReq.body = requestPayload; + } + return awsSignedReq; + } +} +exports.AwsRequestSigner = AwsRequestSigner; +/** + * Creates the HMAC-SHA256 hash of the provided message using the + * provided key. + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The HMAC-SHA256 key to use. + * @param msg The message to hash. + * @return The computed hash bytes. + */ +async function sign(crypto, key, msg) { + return await crypto.signWithHmacSha256(key, msg); +} +/** + * Calculates the signing key used to calculate the signature for + * AWS Signature Version 4 based on: + * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + * + * @param crypto The crypto instance used to facilitate cryptographic + * operations. + * @param key The AWS secret access key. + * @param dateStamp The '%Y%m%d' date format. + * @param region The AWS region. + * @param serviceName The AWS service name, eg. sts. + * @return The signing key bytes. + */ +async function getSigningKey(crypto, key, dateStamp, region, serviceName) { + const kDate = await sign(crypto, `AWS4${key}`, dateStamp); + const kRegion = await sign(crypto, kDate, region); + const kService = await sign(crypto, kRegion, serviceName); + const kSigning = await sign(crypto, kService, 'aws4_request'); + return kSigning; +} +/** + * Generates the authentication header map needed for generating the AWS + * Signature Version 4 signed request. 
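+ *
+ * Roughly, the derivation performed by this module is (the date, region and
+ * service values below are illustrative only):
+ * ```
+ * const kDate = await sign(crypto, `AWS4${secretAccessKey}`, '20240101');
+ * const kRegion = await sign(crypto, kDate, 'us-east-2');
+ * const kService = await sign(crypto, kRegion, 'sts');
+ * const kSigning = await sign(crypto, kService, 'aws4_request');
+ * const signature = await sign(crypto, kSigning, stringToSign);
+ * // Authorization: AWS4-HMAC-SHA256 Credential=<accessKeyId>/<credentialScope>,
+ * //   SignedHeaders=<signedHeaders>, Signature=<hex(signature)>
+ * ```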
+ * + * @param option The options needed to compute the authentication header map. + * @return The AWS authentication header map which constitutes of the following + * components: amz-date, authorization header and canonical query string. + */ +async function generateAuthenticationHeaderMap(options) { + const additionalAmzHeaders = options.additionalAmzHeaders || {}; + const requestPayload = options.requestPayload || ''; + // iam.amazonaws.com host => iam service. + // sts.us-east-2.amazonaws.com => sts service. + const serviceName = options.host.split('.')[0]; + const now = new Date(); + // Format: '%Y%m%dT%H%M%SZ'. + const amzDate = now + .toISOString() + .replace(/[-:]/g, '') + .replace(/\.[0-9]+/, ''); + // Format: '%Y%m%d'. + const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); + // Change all additional headers to be lower case. + const reformattedAdditionalAmzHeaders = {}; + Object.keys(additionalAmzHeaders).forEach(key => { + reformattedAdditionalAmzHeaders[key.toLowerCase()] = + additionalAmzHeaders[key]; + }); + // Add AWS token if available. + if (options.securityCredentials.token) { + reformattedAdditionalAmzHeaders['x-amz-security-token'] = + options.securityCredentials.token; + } + // Header keys need to be sorted alphabetically. + const amzHeaders = Object.assign({ + host: options.host, + }, + // Previously the date was not fixed with x-amz- and could be provided manually. + // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req + reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); + let canonicalHeaders = ''; + const signedHeadersList = Object.keys(amzHeaders).sort(); + signedHeadersList.forEach(key => { + canonicalHeaders += `${key}:${amzHeaders[key]}\n`; + }); + const signedHeaders = signedHeadersList.join(';'); + const payloadHash = await options.crypto.sha256DigestHex(requestPayload); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + const canonicalRequest = `${options.method}\n` + + `${options.canonicalUri}\n` + + `${options.canonicalQuerystring}\n` + + `${canonicalHeaders}\n` + + `${signedHeaders}\n` + + `${payloadHash}`; + const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; + // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html + const stringToSign = `${AWS_ALGORITHM}\n` + + `${amzDate}\n` + + `${credentialScope}\n` + + (await options.crypto.sha256DigestHex(canonicalRequest)); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html + const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); + const signature = await sign(options.crypto, signingKey, stringToSign); + // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html + const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; + return { + // Do not return x-amz-date if date is available. + amzDate: reformattedAdditionalAmzHeaders.date ? 
undefined : amzDate, + authorizationHeader, + canonicalQuerystring: options.canonicalQuerystring, + }; +} + + +/***/ }), + +/***/ 142: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _BaseExternalAccountClient_instances, _BaseExternalAccountClient_pendingAccessToken, _BaseExternalAccountClient_internalRefreshAccessTokenAsync; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +const util_1 = __nccwpck_require__(7870); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** The default OAuth scope to request when none is provided. */ +const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; +/** Default impersonated token lifespan in seconds.*/ +const DEFAULT_TOKEN_LIFESPAN = 3600; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * The credentials JSON file type for external account clients. + * There are 3 types of JSON configs: + * 1. authorized_user => Google end user credential + * 2. service_account => Google service account credential + * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) + */ +exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; +/** + * Cloud resource manager URL used to retrieve project information. 
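+ *
+ * (`getProjectId()` appends the workload's project number to the configured
+ * cloud resource manager URL, e.g.
+ * `https://cloudresourcemanager.googleapis.com/v1/projects/123456789` with an
+ * illustrative project number, and reads `projectId` from the JSON response.)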
+ * + * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead + **/ +exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; +/** The workforce audience pattern. */ +const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +/** + * For backwards compatibility. + */ +var authclient_2 = __nccwpck_require__(4810); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); +/** + * Base external account client. This is used to instantiate AuthClients for + * exchanging external account credentials for GCP access token and authorizing + * requests to GCP APIs. + * The base class implements common logic for exchanging various type of + * external credentials for GCP access token. The logic of determining and + * retrieving the external credential based on the environment and + * credential_source will be left for the subclasses. + */ +class BaseExternalAccountClient extends authclient_1.AuthClient { + /** + * Instantiate a BaseExternalAccountClient instance using the provided JSON + * object loaded from an external account credentials file. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + var _a; + super({ ...options, ...additionalOptions }); + _BaseExternalAccountClient_instances.add(this); + /** + * A pending access token request. Used for concurrent calls. + */ + _BaseExternalAccountClient_pendingAccessToken.set(this, null); + const opts = (0, util_1.originalOrCamelOptions)(options); + const type = opts.get('type'); + if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { + throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + + `received "${options.type}"`); + } + const clientId = opts.get('client_id'); + const clientSecret = opts.get('client_secret'); + const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? 
_a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); + const subjectTokenType = opts.get('subject_token_type'); + const workforcePoolUserProject = opts.get('workforce_pool_user_project'); + const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); + const serviceAccountImpersonation = opts.get('service_account_impersonation'); + const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); + this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || + `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); + if (clientId) { + this.clientAuth = { + confidentialClientType: 'basic', + clientId, + clientSecret, + }; + } + this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); + this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; + this.cachedAccessToken = null; + this.audience = opts.get('audience'); + this.subjectTokenType = subjectTokenType; + this.workforcePoolUserProject = workforcePoolUserProject; + const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); + if (this.workforcePoolUserProject && + !this.audience.match(workforceAudiencePattern)) { + throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + + 'credentials.'); + } + this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; + this.serviceAccountImpersonationLifetime = + serviceAccountImpersonationLifetime; + if (this.serviceAccountImpersonationLifetime) { + this.configLifetimeRequested = true; + } + else { + this.configLifetimeRequested = false; + this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; + } + this.projectNumber = this.getProjectNumber(this.audience); + this.supplierContext = { + audience: this.audience, + subjectTokenType: this.subjectTokenType, + transporter: this.transporter, + }; + } + /** The service account email to be impersonated, if available. */ + getServiceAccountEmail() { + var _a; + if (this.serviceAccountImpersonationUrl) { + if (this.serviceAccountImpersonationUrl.length > 256) { + /** + * Prevents DOS attacks. + * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} + **/ + throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); + } + // Parse email from URL. The formal looks as follows: + // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken + const re = /serviceAccounts\/(?[^:]+):generateAccessToken$/; + const result = re.exec(this.serviceAccountImpersonationUrl); + return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; + } + return null; + } + /** + * Provides a mechanism to inject GCP access tokens directly. + * When the provided credential expires, a new credential, using the + * external account options, is retrieved. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + super.setCredentials(credentials); + this.cachedAccessToken = credentials; + } + /** + * @return A promise that resolves with the current GCP access token + * response. If the current credential is expired, a new one is retrieved. + */ + async getAccessToken() { + // If cached access token is unavailable or expired, force refresh. 
+ if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return GCP access token in GetAccessTokenResponse format. + return { + token: this.cachedAccessToken.access_token, + res: this.cachedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * The result has the form: + * { Authorization: 'Bearer ' } + */ + async getRequestHeaders() { + const accessTokenResponse = await this.getAccessToken(); + const headers = { + Authorization: `Bearer ${accessTokenResponse.token}`, + }; + return this.addSharedMetadataHeaders(headers); + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + /** + * @return A promise that resolves with the project ID corresponding to the + * current workload identity pool or current workforce pool if + * determinable. For workforce pool credential, it returns the project ID + * corresponding to the workforcePoolUserProject. + * This is introduced to match the current pattern of using the Auth + * library: + * const projectId = await auth.getProjectId(); + * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; + * const res = await client.request({ url }); + * The resource may not have permission + * (resourcemanager.projects.get) to call this API or the required + * scopes may not be selected: + * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes + */ + async getProjectId() { + const projectNumber = this.projectNumber || this.workforcePoolUserProject; + if (this.projectId) { + // Return previously determined project ID. + return this.projectId; + } + else if (projectNumber) { + // Preferable not to use request() to avoid retrial policies. + const headers = await this.getRequestHeaders(); + const response = await this.transporter.request({ + ...BaseExternalAccountClient.RETRY_CONFIG, + headers, + url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, + responseType: 'json', + }); + this.projectId = response.data.projectId; + return this.projectId; + } + return null; + } + /** + * Authenticates the provided HTTP request, processes it and resolves with the + * returned response. + * @param opts The HTTP request options. + * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. 
+ // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * External credentials are exchanged for GCP access tokens via the token + * exchange endpoint and other settings provided in the client options + * object. + * If the service_account_impersonation_url is provided, an additional + * step to exchange the external account GCP access token for a service + * account impersonated token is performed. + * @return A promise that resolves with the fresh GCP access tokens. + */ + async refreshAccessTokenAsync() { + // Use an existing access token request, or cache a new one + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f") || __classPrivateFieldGet(this, _BaseExternalAccountClient_instances, "m", _BaseExternalAccountClient_internalRefreshAccessTokenAsync).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f"); + } + finally { + // clear pending access token for future requests + __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, null, "f"); + } + } + /** + * Returns the workload identity pool project number if it is determinable + * from the audience resource name. + * @param audience The STS audience used to determine the project number. + * @return The project number associated with the workload identity pool, if + * this can be determined from the STS audience field. Otherwise, null is + * returned. + */ + getProjectNumber(audience) { + // STS audience pattern: + // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... + const match = audience.match(/\/projects\/([^/]+)/); + if (!match) { + return null; + } + return match[1]; + } + /** + * Exchanges an external account GCP access token for a service + * account impersonated access token using iamcredentials + * GenerateAccessToken API. + * @param token The access token to exchange for a service account access + * token. + * @return A promise that resolves with the service account impersonated + * credentials response. + */ + async getImpersonatedAccessToken(token) { + const opts = { + ...BaseExternalAccountClient.RETRY_CONFIG, + url: this.serviceAccountImpersonationUrl, + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${token}`, + }, + data: { + scope: this.getScopesArray(), + lifetime: this.serviceAccountImpersonationLifetime + 's', + }, + responseType: 'json', + }; + const response = await this.transporter.request(opts); + const successResponse = response.data; + return { + access_token: successResponse.accessToken, + // Convert from ISO format to timestamp. + expiry_date: new Date(successResponse.expireTime).getTime(), + res: response, + }; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param accessToken The credentials to check for expiration. 
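+     * (For illustration: with the default 5 minute `eagerRefreshThresholdMillis`,
+     * a token whose `expiry_date` is only 2 minutes away is already treated as
+     * expired, because `now >= expiry_date - 5 * 60 * 1000` then holds.)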
+ * @return Whether the credentials are expired or not. + */ + isExpired(accessToken) { + const now = new Date().getTime(); + return accessToken.expiry_date + ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } + /** + * @return The list of scopes for the requested GCP access token. + */ + getScopesArray() { + // Since scopes can be provided as string or array, the type should + // be normalized. + if (typeof this.scopes === 'string') { + return [this.scopes]; + } + return this.scopes || [DEFAULT_OAUTH_SCOPE]; + } + getMetricsHeaderValue() { + const nodeVersion = process.version.replace(/^v/, ''); + const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; + const credentialSourceType = this.credentialSourceType + ? this.credentialSourceType + : 'unknown'; + return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; + } +} +exports.BaseExternalAccountClient = BaseExternalAccountClient; +_BaseExternalAccountClient_pendingAccessToken = new WeakMap(), _BaseExternalAccountClient_instances = new WeakSet(), _BaseExternalAccountClient_internalRefreshAccessTokenAsync = async function _BaseExternalAccountClient_internalRefreshAccessTokenAsync() { + // Retrieve the external credential. + const subjectToken = await this.retrieveSubjectToken(); + // Construct the STS credentials options. + const stsCredentialsOptions = { + grantType: STS_GRANT_TYPE, + audience: this.audience, + requestedTokenType: STS_REQUEST_TOKEN_TYPE, + subjectToken, + subjectTokenType: this.subjectTokenType, + // generateAccessToken requires the provided access token to have + // scopes: + // https://www.googleapis.com/auth/iam or + // https://www.googleapis.com/auth/cloud-platform + // The new service account access token scopes will match the user + // provided ones. + scope: this.serviceAccountImpersonationUrl + ? [DEFAULT_OAUTH_SCOPE] + : this.getScopesArray(), + }; + // Exchange the external credentials for a GCP access token. + // Client auth is prioritized over passing the workforcePoolUserProject + // parameter for STS token exchange. + const additionalOptions = !this.clientAuth && this.workforcePoolUserProject + ? { userProject: this.workforcePoolUserProject } + : undefined; + const additionalHeaders = { + 'x-goog-api-client': this.getMetricsHeaderValue(), + }; + const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); + if (this.serviceAccountImpersonationUrl) { + this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); + } + else if (stsResponse.expires_in) { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, + res: stsResponse.res, + }; + } + else { + // Save response in cached access token. + this.cachedAccessToken = { + access_token: stsResponse.access_token, + res: stsResponse.res, + }; + } + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedAccessToken.expiry_date, + access_token: this.cachedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. 
+ return this.cachedAccessToken; +}; + + +/***/ }), + +/***/ 977: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Compute = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const gcpMetadata = __nccwpck_require__(3046); +const oauth2client_1 = __nccwpck_require__(91); +class Compute extends oauth2client_1.OAuth2Client { + /** + * Google Compute Engine service account credentials. + * + * Retrieve access token from the metadata server. + * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications + */ + constructor(options = {}) { + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; + this.serviceAccountEmail = options.serviceAccountEmail || 'default'; + this.scopes = Array.isArray(options.scopes) + ? options.scopes + : options.scopes + ? [options.scopes] + : []; + } + /** + * Refreshes the access token. + * @param refreshToken Unused parameter + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; + let data; + try { + const instanceOptions = { + property: tokenPath, + }; + if (this.scopes.length > 0) { + instanceOptions.params = { + scopes: this.scopes.join(','), + }; + } + data = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError) { + e.message = `Could not refresh access token: ${e.message}`; + this.wrapError(e); + } + throw e; + } + const tokens = data; + if (data && data.expires_in) { + tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res: null }; + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + + `?format=full&audience=${targetAudience}`; + let idToken; + try { + const instanceOptions = { + property: idTokenPath, + }; + idToken = await gcpMetadata.instance(instanceOptions); + } + catch (e) { + if (e instanceof Error) { + e.message = `Could not fetch ID token: ${e.message}`; + } + throw e; + } + return idToken; + } + wrapError(e) { + const res = e.response; + if (res && res.status) { + e.status = res.status; + if (res.status === 403) { + e.message = + 'A Forbidden error was returned while attempting to retrieve an access ' + + 'token for the Compute Engine built-in service account. 
This may be because the Compute ' + + 'Engine instance does not have the correct permission scopes specified: ' + + e.message; + } + else if (res.status === 404) { + e.message = + 'A Not Found error was returned while attempting to retrieve an access' + + 'token for the Compute Engine built-in service account. This may be because the Compute ' + + 'Engine instance does not have any permission scopes specified: ' + + e.message; + } + } + } +} +exports.Compute = Compute; + + +/***/ }), + +/***/ 9157: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultAwsSecurityCredentialsSupplier = void 0; +/** + * Internal AWS security credentials supplier implementation used by {@link AwsClient} + * when a credential source is provided instead of a user defined supplier. + * The logic is summarized as: + * 1. If imdsv2_session_token_url is provided in the credential source, then + * fetch the aws session token and include it in the headers of the + * metadata requests. This is a requirement for IDMSv2 but optional + * for IDMSv1. + * 2. Retrieve AWS region from availability-zone. + * 3a. Check AWS credentials in environment variables. If not found, get + * from security-credentials endpoint. + * 3b. Get AWS credentials from security-credentials endpoint. In order + * to retrieve this, the AWS role needs to be determined by calling + * security-credentials endpoint without any argument. Then the + * credentials can be retrieved via: security-credentials/role_name + * 4. Generate the signed request to AWS STS GetCallerIdentity action. + * 5. Inject x-goog-cloud-target-resource into header and serialize the + * signed request. This will be the subject-token to pass to GCP STS. + */ +class DefaultAwsSecurityCredentialsSupplier { + /** + * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information + * from the credential_source stored in the ADC file. + * @param opts The default aws security credentials supplier options object to + * build the supplier with. 
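+ *
+ * @example
+ * A rough sketch using the usual EC2 instance metadata endpoints; the URLs
+ * are shown here for illustration only:
+ * ```
+ * const supplier = new DefaultAwsSecurityCredentialsSupplier({
+ *   regionUrl: 'http://169.254.169.254/latest/meta-data/placement/availability-zone',
+ *   securityCredentialsUrl: 'http://169.254.169.254/latest/meta-data/iam/security-credentials',
+ *   imdsV2SessionTokenUrl: 'http://169.254.169.254/latest/api/token',
+ * });
+ * ```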
+ */ + constructor(opts) { + _DefaultAwsSecurityCredentialsSupplier_instances.add(this); + this.regionUrl = opts.regionUrl; + this.securityCredentialsUrl = opts.securityCredentialsUrl; + this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Returns the active AWS region. This first checks to see if the region + * is available as an environment variable. If it is not, then the supplier + * will call the region URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS region string. + */ + async getAwsRegion(context) { + // Priority order for region determination: + // AWS_REGION > AWS_DEFAULT_REGION > metadata server. + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); + } + const metadataHeaders = {}; + if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + if (!this.regionUrl) { + throw new Error('Unable to determine AWS region due to missing ' + + '"options.credential_source.region_url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.regionUrl, + method: 'GET', + responseType: 'text', + headers: metadataHeaders, + }; + const response = await context.transporter.request(opts); + // Remove last character. For example, if us-east-2b is returned, + // the region would be us-east-2. + return response.data.substr(0, response.data.length - 1); + } + /** + * Returns AWS security credentials. This first checks to see if the credentials + * is available as environment variables. If it is not, then the supplier + * will call the security credentials URL. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link AwsClient}, contains the requested audience and subject token type + * for the external account identity. + * @return A promise that resolves with the AWS security credentials. + */ + async getAwsSecurityCredentials(context) { + // Check environment variables for permanent credentials first. + // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html + if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { + return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); + } + const metadataHeaders = {}; + if (this.imdsV2SessionTokenUrl) { + metadataHeaders['x-aws-ec2-metadata-token'] = + await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); + } + // Since the role on a VM can change, we don't need to cache it. 
+ const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); + // Temporary credentials typically last for several hours. + // Expiration is returned in response. + // Consider future optimization of this logic to cache AWS tokens + // until their natural expiration. + const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); + return { + accessKeyId: awsCreds.AccessKeyId, + secretAccessKey: awsCreds.SecretAccessKey, + token: awsCreds.Token, + }; + } +} +exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; +_DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = +/** + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the IMDSv2 Session Token. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.imdsV2SessionTokenUrl, + method: 'PUT', + responseType: 'text', + headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = +/** + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the assigned role to the current + * AWS VM. This is needed for calling the security-credentials endpoint. + */ +async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { + if (!this.securityCredentialsUrl) { + throw new Error('Unable to determine AWS role name due to missing ' + + '"options.credential_source.url"'); + } + const opts = { + ...this.additionalGaxiosOptions, + url: this.securityCredentialsUrl, + method: 'GET', + responseType: 'text', + headers: headers, + }; + const response = await transporter.request(opts); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = +/** + * Retrieves the temporary AWS credentials by calling the security-credentials + * endpoint as specified in the `credential_source` object. + * @param roleName The role attached to the current VM. + * @param headers The headers to be used in the metadata request. + * @param transporter The transporter to use for requests. + * @return A promise that resolves with the temporary AWS credentials + * needed for creating the GetCallerIdentity signed request. + */ +async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { + const response = await transporter.request({ + ...this.additionalGaxiosOptions, + url: `${this.securityCredentialsUrl}/${roleName}`, + responseType: 'json', + headers: headers, + }); + return response.data; +}, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { + // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. + // Only one is required. 
+ return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); +}, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { + // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. + if (process.env['AWS_ACCESS_KEY_ID'] && + process.env['AWS_SECRET_ACCESS_KEY']) { + return { + accessKeyId: process.env['AWS_ACCESS_KEY_ID'], + secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], + token: process.env['AWS_SESSION_TOKEN'], + }; + } + return null; +}; + + +/***/ }), + +/***/ 7556: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; +const stream = __nccwpck_require__(2203); +const authclient_1 = __nccwpck_require__(4810); +const sts = __nccwpck_require__(121); +/** + * The required token exchange grant_type: rfc8693#section-2.1 + */ +const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; +/** + * The requested token exchange requested_token_type: rfc8693#section-2.1 + */ +const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The requested token exchange subject_token_type: rfc8693#section-2.1 + */ +const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; +/** + * The maximum number of access boundary rules a Credential Access Boundary + * can contain. + */ +exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; +/** + * Offset to take into account network delays and server clock skews. + */ +exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; +/** + * Defines a set of Google credentials that are downscoped from an existing set + * of Google OAuth2 credentials. This is useful to restrict the Identity and + * Access Management (IAM) permissions that a short-lived credential can use. + * The common pattern of usage is to have a token broker with elevated access + * generate these downscoped credentials from higher access source credentials + * and pass the downscoped short-lived access tokens to a token consumer via + * some secure authenticated channel for limited access to Google Cloud Storage + * resources. + */ +class DownscopedClient extends authclient_1.AuthClient { + /** + * Instantiates a downscoped client object using the provided source + * AuthClient and credential access boundary rules. + * To downscope permissions of a source AuthClient, a Credential Access + * Boundary that specifies which resources the new credential can access, as + * well as an upper bound on the permissions that are available on each + * resource, has to be defined. A downscoped client can then be instantiated + * using the source AuthClient and the Credential Access Boundary. 
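+ *
+ * @example
+ * A minimal sketch; the bucket name and role below are placeholders:
+ * ```
+ * const {GoogleAuth, DownscopedClient} = require('google-auth-library');
+ *
+ * const sourceClient = await new GoogleAuth({
+ *   scopes: ['https://www.googleapis.com/auth/cloud-platform'],
+ * }).getClient();
+ * const cab = {
+ *   accessBoundary: {
+ *     accessBoundaryRules: [{
+ *       availableResource: '//storage.googleapis.com/projects/_/buckets/my-bucket',
+ *       availablePermissions: ['inRole:roles/storage.objectViewer'],
+ *     }],
+ *   },
+ * };
+ * const downscoped = new DownscopedClient(sourceClient, cab);
+ * const {token, expirationTime} = await downscoped.getAccessToken();
+ * ```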
+ * @param authClient The source AuthClient to be downscoped based on the + * provided Credential Access Boundary rules. + * @param credentialAccessBoundary The Credential Access Boundary which + * contains a list of access boundary rules. Each rule contains information + * on the resource that the rule applies to, the upper bound of the + * permissions that are available on that resource and an optional + * condition to further restrict permissions. + * @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** + * Optional additional behavior customization options. + * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** + * Optional quota project id for setting up in the x-goog-user-project header. + */ + constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { + super({ ...additionalOptions, quotaProjectId }); + this.authClient = authClient; + this.credentialAccessBoundary = credentialAccessBoundary; + // Check 1-10 Access Boundary Rules are defined within Credential Access + // Boundary. + if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { + throw new Error('At least one access boundary rule needs to be defined.'); + } + else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > + exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { + throw new Error('The provided access boundary has more than ' + + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); + } + // Check at least one permission should be defined in each Access Boundary + // Rule. + for (const rule of credentialAccessBoundary.accessBoundary + .accessBoundaryRules) { + if (rule.availablePermissions.length === 0) { + throw new Error('At least one permission should be defined in access boundary rules.'); + } + } + this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); + this.cachedDownscopedAccessToken = null; + } + /** + * Provides a mechanism to inject Downscoped access tokens directly. + * The expiry_date field is required to facilitate determination of the token + * expiration which would make it easier for the token consumer to handle. + * @param credentials The Credentials object to set on the current client. + */ + setCredentials(credentials) { + if (!credentials.expiry_date) { + throw new Error('The access token expiry_date field is missing in the provided ' + + 'credentials.'); + } + super.setCredentials(credentials); + this.cachedDownscopedAccessToken = credentials; + } + async getAccessToken() { + // If the cached access token is unavailable or expired, force refresh. + // The Downscoped access token will be returned in + // DownscopedAccessTokenResponse format. + if (!this.cachedDownscopedAccessToken || + this.isExpired(this.cachedDownscopedAccessToken)) { + await this.refreshAccessTokenAsync(); + } + // Return Downscoped access token in DownscopedAccessTokenResponse format. + return { + token: this.cachedDownscopedAccessToken.access_token, + expirationTime: this.cachedDownscopedAccessToken.expiry_date, + res: this.cachedDownscopedAccessToken.res, + }; + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. 
+ *
+ * The result has the form:
+ * { Authorization: 'Bearer <access_token>' }
+ */
+ async getRequestHeaders() {
+ const accessTokenResponse = await this.getAccessToken();
+ const headers = {
+ Authorization: `Bearer ${accessTokenResponse.token}`,
+ };
+ return this.addSharedMetadataHeaders(headers);
+ }
+ request(opts, callback) {
+ if (callback) {
+ this.requestAsync(opts).then(r => callback(null, r), e => {
+ return callback(e, e.response);
+ });
+ }
+ else {
+ return this.requestAsync(opts);
+ }
+ }
+ /**
+ * Authenticates the provided HTTP request, processes it and resolves with the
+ * returned response.
+ * @param opts The HTTP request options.
+ * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure
+ * @return A promise that resolves with the successful response.
+ */
+ async requestAsync(opts, reAuthRetried = false) {
+ let response;
+ try {
+ const requestHeaders = await this.getRequestHeaders();
+ opts.headers = opts.headers || {};
+ if (requestHeaders && requestHeaders['x-goog-user-project']) {
+ opts.headers['x-goog-user-project'] =
+ requestHeaders['x-goog-user-project'];
+ }
+ if (requestHeaders && requestHeaders.Authorization) {
+ opts.headers.Authorization = requestHeaders.Authorization;
+ }
+ response = await this.transporter.request(opts);
+ }
+ catch (e) {
+ const res = e.response;
+ if (res) {
+ const statusCode = res.status;
+ // Retry the request for metadata if the following criteria are true:
+ // - We haven't already retried. It only makes sense to retry once.
+ // - The response was a 401 or a 403
+ // - The request didn't send a readableStream
+ // - forceRefreshOnFailure is true
+ const isReadableStream = res.config.data instanceof stream.Readable;
+ const isAuthErr = statusCode === 401 || statusCode === 403;
+ if (!reAuthRetried &&
+ isAuthErr &&
+ !isReadableStream &&
+ this.forceRefreshOnFailure) {
+ await this.refreshAccessTokenAsync();
+ return await this.requestAsync(opts, true);
+ }
+ }
+ throw e;
+ }
+ return response;
+ }
+ /**
+ * Forces token refresh, even if unexpired tokens are currently cached.
+ * GCP access tokens are retrieved from authclient object/source credential.
+ * Then GCP access tokens are exchanged for downscoped access tokens via the
+ * token exchange endpoint.
+ * @return A promise that resolves with the fresh downscoped access token.
+ */
+ async refreshAccessTokenAsync() {
+ var _a;
+ // Retrieve GCP access token from source credential.
+ const subjectToken = (await this.authClient.getAccessToken()).token;
+ // Construct the STS credentials options.
+ const stsCredentialsOptions = {
+ grantType: STS_GRANT_TYPE,
+ requestedTokenType: STS_REQUEST_TOKEN_TYPE,
+ subjectToken: subjectToken,
+ subjectTokenType: STS_SUBJECT_TOKEN_TYPE,
+ };
+ // Exchange the source AuthClient access token for a Downscoped access
+ // token.
+ const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary);
+ /**
+ * The STS endpoint will only return the expiration time for the downscoped
+ * access token if the original access token represents a service account.
+ * The downscoped token's expiration time will always match the source
+ * credential expiration. When no expires_in is returned, we can copy the
+ * source credential's expiration time.
+ */
+ const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null;
+ const expiryDate = stsResponse.expires_in
+ ? 
new Date().getTime() + stsResponse.expires_in * 1000 + : sourceCredExpireDate; + // Save response in cached access token. + this.cachedDownscopedAccessToken = { + access_token: stsResponse.access_token, + expiry_date: expiryDate, + res: stsResponse.res, + }; + // Save credentials. + this.credentials = {}; + Object.assign(this.credentials, this.cachedDownscopedAccessToken); + delete this.credentials.res; + // Trigger tokens event to notify external listeners. + this.emit('tokens', { + refresh_token: null, + expiry_date: this.cachedDownscopedAccessToken.expiry_date, + access_token: this.cachedDownscopedAccessToken.access_token, + token_type: 'Bearer', + id_token: null, + }); + // Return the cached access token. + return this.cachedDownscopedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param downscopedAccessToken The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(downscopedAccessToken) { + const now = new Date().getTime(); + return downscopedAccessToken.expiry_date + ? now >= + downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.DownscopedClient = DownscopedClient; + + +/***/ }), + +/***/ 963: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GCPEnv = void 0; +exports.clear = clear; +exports.getEnv = getEnv; +const gcpMetadata = __nccwpck_require__(3046); +var GCPEnv; +(function (GCPEnv) { + GCPEnv["APP_ENGINE"] = "APP_ENGINE"; + GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; + GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; + GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; + GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; + GCPEnv["NONE"] = "NONE"; +})(GCPEnv || (exports.GCPEnv = GCPEnv = {})); +let envPromise; +function clear() { + envPromise = undefined; +} +async function getEnv() { + if (envPromise) { + return envPromise; + } + envPromise = getEnvMemoized(); + return envPromise; +} +async function getEnvMemoized() { + let env = GCPEnv.NONE; + if (isAppEngine()) { + env = GCPEnv.APP_ENGINE; + } + else if (isCloudFunction()) { + env = GCPEnv.CLOUD_FUNCTIONS; + } + else if (await isComputeEngine()) { + if (await isKubernetesEngine()) { + env = GCPEnv.KUBERNETES_ENGINE; + } + else if (isCloudRun()) { + env = GCPEnv.CLOUD_RUN; + } + else { + env = GCPEnv.COMPUTE_ENGINE; + } + } + else { + env = GCPEnv.NONE; + } + return env; +} +function isAppEngine() { + return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); +} +function isCloudFunction() { + return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); +} +/** + * This check only verifies that the environment is running knative. 
+ * This must be run *after* checking for Kubernetes, otherwise it will + * return a false positive. + */ +function isCloudRun() { + return !!process.env.K_CONFIGURATION; +} +async function isKubernetesEngine() { + try { + await gcpMetadata.instance('attributes/cluster-name'); + return true; + } + catch (e) { + return false; + } +} +async function isComputeEngine() { + return gcpMetadata.isAvailable(); +} + + +/***/ }), + +/***/ 3247: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; +const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; +const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; +const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; +/** + * Defines the response of a 3rd party executable run by the pluggable auth client. + */ +class ExecutableResponse { + /** + * Instantiates an ExecutableResponse instance using the provided JSON object + * from the output of the executable. + * @param responseJson Response from a 3rd party executable, loaded from a + * run of the executable or a cached output file. + */ + constructor(responseJson) { + // Check that the required fields exist in the json response. + if (!responseJson.version) { + throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); + } + if (responseJson.success === undefined) { + throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); + } + this.version = responseJson.version; + this.success = responseJson.success; + // Validate required fields for a successful response. + if (this.success) { + this.expirationTime = responseJson.expiration_time; + this.tokenType = responseJson.token_type; + // Validate token type field. + if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && + this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { + throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); + } + // Validate subject token. 
+ if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { + if (!responseJson.saml_response) { + throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); + } + this.subjectToken = responseJson.saml_response; + } + else { + if (!responseJson.id_token) { + throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); + } + this.subjectToken = responseJson.id_token; + } + } + else { + // Both code and message must be provided for unsuccessful responses. + if (!responseJson.code) { + throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); + } + if (!responseJson.message) { + throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); + } + this.errorCode = responseJson.code; + this.errorMessage = responseJson.message; + } + } + /** + * @return A boolean representing if the response has a valid token. Returns + * true when the response was successful and the token is not expired. + */ + isValid() { + return !this.isExpired() && this.success; + } + /** + * @return A boolean representing if the response is expired. Returns true if the + * provided timeout has passed. + */ + isExpired() { + return (this.expirationTime !== undefined && + this.expirationTime < Math.round(Date.now() / 1000)); + } +} +exports.ExecutableResponse = ExecutableResponse; +/** + * An error thrown by the ExecutableResponse class. + */ +class ExecutableResponseError extends Error { + constructor(message) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableResponseError = ExecutableResponseError; +/** + * An error thrown when the 'version' field in an executable response is missing or invalid. + */ +class InvalidVersionFieldError extends ExecutableResponseError { +} +exports.InvalidVersionFieldError = InvalidVersionFieldError; +/** + * An error thrown when the 'success' field in an executable response is missing or invalid. + */ +class InvalidSuccessFieldError extends ExecutableResponseError { +} +exports.InvalidSuccessFieldError = InvalidSuccessFieldError; +/** + * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. + */ +class InvalidExpirationTimeFieldError extends ExecutableResponseError { +} +exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; +/** + * An error thrown when the 'token_type' field in an executable response is missing or invalid. + */ +class InvalidTokenTypeFieldError extends ExecutableResponseError { +} +exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; +/** + * An error thrown when the 'code' field in an executable response is missing or invalid. + */ +class InvalidCodeFieldError extends ExecutableResponseError { +} +exports.InvalidCodeFieldError = InvalidCodeFieldError; +/** + * An error thrown when the 'message' field in an executable response is missing or invalid. + */ +class InvalidMessageFieldError extends ExecutableResponseError { +} +exports.InvalidMessageFieldError = InvalidMessageFieldError; +/** + * An error thrown when the subject token in an executable response is missing or invalid. 
+ */ +class InvalidSubjectTokenError extends ExecutableResponseError { +} +exports.InvalidSubjectTokenError = InvalidSubjectTokenError; + + +/***/ }), + +/***/ 4240: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; +const authclient_1 = __nccwpck_require__(4810); +const oauth2common_1 = __nccwpck_require__(6653); +const gaxios_1 = __nccwpck_require__(7003); +const stream = __nccwpck_require__(2203); +const baseexternalclient_1 = __nccwpck_require__(142); +/** + * The credentials JSON file type for external account authorized user clients. + */ +exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; +const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; +/** + * Handler for token refresh requests sent to the token_url endpoint for external + * authorized user credentials. + */ +class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an ExternalAccountAuthorizedUserHandler instance. + * @param url The URL of the token refresh endpoint. + * @param transporter The transporter to use for the refresh request. + * @param clientAuthentication The client authentication credentials to use + * for the refresh request. + */ + constructor(url, transporter, clientAuthentication) { + super(clientAuthentication); + this.url = url; + this.transporter = transporter; + } + /** + * Requests a new access token from the token_url endpoint using the provided + * refresh token. + * @param refreshToken The refresh token to use to generate a new access token. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @return A promise that resolves with the token refresh response containing + * the requested access token and its expiration time. + */ + async refreshToken(refreshToken, additionalHeaders) { + const values = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + ...additionalHeaders, + }; + const opts = { + ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, + url: this.url, + method: 'POST', + headers, + data: values.toString(), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const tokenRefreshResponse = response.data; + tokenRefreshResponse.res = response; + return tokenRefreshResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) {
+ throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, 
+ // Preserve other fields from the original error.
+ error);
+ }
+ // Request could fail before the server responds.
+ throw error;
+ }
+ }
+}
+/**
+ * External Account Authorized User Client. This is used for OAuth2 credentials
+ * sourced using external identities through Workforce Identity Federation.
+ * Obtaining the initial access and refresh token can be done through the
+ * Google Cloud CLI.
+ */
+class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient {
+ /**
+ * Instantiates an ExternalAccountAuthorizedUserClient instance using the
+ * provided JSON object loaded from a credentials file.
+ * An error is thrown if the credential is not valid.
+ * @param options The external account authorized user option object typically
+ * from the external account authorized user JSON credential file.
+ * @param additionalOptions **DEPRECATED, all options are available in the
+ * `options` parameter.** Optional additional behavior customization options.
+ * These currently customize expiration threshold time and whether to retry
+ * on 401/403 API request errors.
+ */
+ constructor(options, additionalOptions) {
+ var _a;
+ super({ ...options, ...additionalOptions });
+ if (options.universe_domain) {
+ this.universeDomain = options.universe_domain;
+ }
+ this.refreshToken = options.refresh_token;
+ const clientAuth = {
+ confidentialClientType: 'basic',
+ clientId: options.client_id,
+ clientSecret: options.client_secret,
+ };
+ this.externalAccountAuthorizedUserHandler =
+ new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth);
+ this.cachedAccessToken = null;
+ this.quotaProjectId = options.quota_project_id;
+ // As threshold could be zero,
+ // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the
+ // zero value.
+ if (typeof (additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') {
+ this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET;
+ }
+ else {
+ this.eagerRefreshThresholdMillis = additionalOptions
+ .eagerRefreshThresholdMillis;
+ }
+ this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure);
+ }
+ async getAccessToken() {
+ // If cached access token is unavailable or expired, force refresh.
+ if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) {
+ await this.refreshAccessTokenAsync();
+ }
+ // Return GCP access token in GetAccessTokenResponse format.
+ return {
+ token: this.cachedAccessToken.access_token,
+ res: this.cachedAccessToken.res,
+ };
+ }
+ async getRequestHeaders() {
+ const accessTokenResponse = await this.getAccessToken();
+ const headers = {
+ Authorization: `Bearer ${accessTokenResponse.token}`,
+ };
+ return this.addSharedMetadataHeaders(headers);
+ }
+ request(opts, callback) {
+ if (callback) {
+ this.requestAsync(opts).then(r => callback(null, r), e => {
+ return callback(e, e.response);
+ });
+ }
+ else {
+ return this.requestAsync(opts);
+ }
+ }
+ /**
+ * Authenticates the provided HTTP request, processes it and resolves with the
+ * returned response.
+ * @param opts The HTTP request options.
+ * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. + * @return A promise that resolves with the successful response. + */ + async requestAsync(opts, reAuthRetried = false) { + let response; + try { + const requestHeaders = await this.getRequestHeaders(); + opts.headers = opts.headers || {}; + if (requestHeaders && requestHeaders['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = + requestHeaders['x-goog-user-project']; + } + if (requestHeaders && requestHeaders.Authorization) { + opts.headers.Authorization = requestHeaders.Authorization; + } + response = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - forceRefreshOnFailure is true + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + this.forceRefreshOnFailure) { + await this.refreshAccessTokenAsync(); + return await this.requestAsync(opts, true); + } + } + throw e; + } + return response; + } + /** + * Forces token refresh, even if unexpired tokens are currently cached. + * @return A promise that resolves with the refreshed credential. + */ + async refreshAccessTokenAsync() { + // Refresh the access token using the refresh token. + const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); + this.cachedAccessToken = { + access_token: refreshResponse.access_token, + expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, + res: refreshResponse.res, + }; + if (refreshResponse.refresh_token !== undefined) { + this.refreshToken = refreshResponse.refresh_token; + } + return this.cachedAccessToken; + } + /** + * Returns whether the provided credentials are expired or not. + * If there is no expiry time, assumes the token is not expired or expiring. + * @param credentials The credentials to check for expiration. + * @return Whether the credentials are expired or not. + */ + isExpired(credentials) { + const now = new Date().getTime(); + return credentials.expiry_date + ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis + : false; + } +} +exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; + + +/***/ }), + +/***/ 8323: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
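+// Editorial note, not part of the upstream google-auth-library source: the module
+// below exposes ExternalAccountClient.fromJSON(), which inspects a parsed
+// external_account credential JSON and returns an AwsClient, PluggableAuthClient,
+// or IdentityPoolClient (or null when the JSON is not an external account credential).
+// Minimal usage sketch; the file name 'workload-identity.json' is a hypothetical example:
+//
+//   const fs = require('fs');
+//   const json = JSON.parse(fs.readFileSync('workload-identity.json', 'utf8'));
+//   const client = ExternalAccountClient.fromJSON(json);
+//   if (client) {
+//     // inside an async function:
+//     const headers = await client.getRequestHeaders();
+//   }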
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ExternalAccountClient = void 0;
+const baseexternalclient_1 = __nccwpck_require__(142);
+const identitypoolclient_1 = __nccwpck_require__(9960);
+const awsclient_1 = __nccwpck_require__(1261);
+const pluggable_auth_client_1 = __nccwpck_require__(6077);
+/**
+ * Dummy class that cannot be instantiated directly. Developers are expected to use fromJSON.
+ */
+class ExternalAccountClient {
+ constructor() {
+ throw new Error('ExternalAccountClients should be initialized via: ' +
+ 'ExternalAccountClient.fromJSON(), ' +
+ 'directly via explicit constructors, e.g. ' +
+ 'new AwsClient(options), new IdentityPoolClient(options), ' +
+ 'new PluggableAuthClient(options), or via ' +
+ 'new GoogleAuth(options).getClient()');
+ }
+ /**
+ * This static method will instantiate the
+ * corresponding type of external account credential depending on the
+ * underlying credential source.
+ * @param options The external account options object typically loaded
+ * from the external account JSON credential file.
+ * @param additionalOptions **DEPRECATED, all options are available in the
+ * `options` parameter.** Optional additional behavior customization options.
+ * These currently customize expiration threshold time and whether to retry
+ * on 401/403 API request errors.
+ * @return A BaseExternalAccountClient instance or null if the options
+ * provided do not correspond to an external account credential.
+ */
+ static fromJSON(options, additionalOptions) {
+ var _a, _b;
+ if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) {
+ if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) {
+ return new awsclient_1.AwsClient(options, additionalOptions);
+ }
+ else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) {
+ return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions);
+ }
+ else {
+ return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions);
+ }
+ }
+ else {
+ return null;
+ }
+ }
+}
+exports.ExternalAccountClient = ExternalAccountClient;
+
+
+/***/ }),
+
+/***/ 932:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+var _a, _b, _c;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.FileSubjectTokenSupplier = void 0;
+const util_1 = __nccwpck_require__(9023);
+const fs = __nccwpck_require__(9896);
+// fs.readfile is undefined in browser karma tests causing
+// `npm run browser-test` to fail as test.oauth2.ts imports this file via
+// src/index.ts.
+// Fallback to void function to avoid promisify throwing a TypeError.
+const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { }));
+const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? 
_b : (() => { })); +const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); +/** + * Internal subject token supplier implementation used when a file location + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class FileSubjectTokenSupplier { + /** + * Instantiates a new file based subject token supplier. + * @param opts The file subject token supplier options to build the supplier + * with. + */ + constructor(opts) { + this.filePath = opts.filePath; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + } + /** + * Returns the subject token stored at the file specified in the constructor. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + // Make sure there is a file at the path. lstatSync will throw if there is + // nothing there. + let parsedFilePath = this.filePath; + try { + // Resolve path to actual file in case of symlink. Expect a thrown error + // if not resolvable. + parsedFilePath = await realpath(parsedFilePath); + if (!(await lstat(parsedFilePath)).isFile()) { + throw new Error(); + } + } + catch (err) { + if (err instanceof Error) { + err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; + } + throw err; + } + let subjectToken; + const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); + if (this.formatType === 'text') { + subjectToken = rawText; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const json = JSON.parse(rawText); + subjectToken = json[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source file'); + } + return subjectToken; + } +} +exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; + + +/***/ }), + +/***/ 5934: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver);
+};
+var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
+ if (kind === "m") throw new TypeError("Private method is not writable");
+ if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
+ if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
+ return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
+};
+var _GoogleAuth_instances, _GoogleAuth_pendingAuthClient, _GoogleAuth_prepareAndCacheClient, _GoogleAuth_determineClient;
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.GoogleAuth = exports.GoogleAuthExceptionMessages = exports.CLOUD_SDK_CLIENT_ID = void 0;
+const child_process_1 = __nccwpck_require__(5317);
+const fs = __nccwpck_require__(9896);
+const gcpMetadata = __nccwpck_require__(3046);
+const os = __nccwpck_require__(857);
+const path = __nccwpck_require__(6928);
+const crypto_1 = __nccwpck_require__(8851);
+const transporters_1 = __nccwpck_require__(7633);
+const computeclient_1 = __nccwpck_require__(977);
+const idtokenclient_1 = __nccwpck_require__(2718);
+const envDetect_1 = __nccwpck_require__(963);
+const jwtclient_1 = __nccwpck_require__(5277);
+const refreshclient_1 = __nccwpck_require__(9807);
+const impersonated_1 = __nccwpck_require__(9964);
+const externalclient_1 = __nccwpck_require__(8323);
+const baseexternalclient_1 = __nccwpck_require__(142);
+const authclient_1 = __nccwpck_require__(4810);
+const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(4240);
+const util_1 = __nccwpck_require__(7870);
+exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com';
+exports.GoogleAuthExceptionMessages = {
+ API_KEY_WITH_CREDENTIALS: 'API Keys and Credentials are mutually exclusive authentication methods and cannot be used together.',
+ NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' +
+ 'To learn more about authentication and Google APIs, visit: \n' +
+ 'https://cloud.google.com/docs/authentication/getting-started',
+ NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' +
+ 'To learn more about authentication and Google APIs, visit: \n' +
+ 'https://cloud.google.com/docs/authentication/getting-started',
+ NO_ADC_FOUND: 'Could not load the default credentials. Browse to https://cloud.google.com/docs/authentication/getting-started for more information.',
+ NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' +
+ 'To learn more about Universe Domain retrieval, visit: \n' +
+ 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys',
+};
+class GoogleAuth {
+ // Note: this property is only public to satisfy unit tests.
+ // https://github.com/Microsoft/TypeScript/issues/5228
+ get isGCE() {
+ return this.checkIsGCE;
+ }
+ /**
+ * Configuration is resolved in the following order of precedence:
+ * - {@link GoogleAuthOptions.credentials `credentials`}
+ * - {@link GoogleAuthOptions.keyFilename `keyFilename`}
+ * - {@link GoogleAuthOptions.keyFile `keyFile`}
+ *
+ * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the
+ * {@link AuthClient `AuthClient`s}.
+ * + * @param opts + */ + constructor(opts = {}) { + _GoogleAuth_instances.add(this); + /** + * Caches a value indicating whether the auth layer is running on Google + * Compute Engine. + * @private + */ + this.checkIsGCE = undefined; + // To save the contents of the JSON credential file + this.jsonContent = null; + this.cachedCredential = null; + /** + * A pending {@link AuthClient}. Used for concurrent {@link GoogleAuth.getClient} calls. + */ + _GoogleAuth_pendingAuthClient.set(this, null); + this.clientOptions = {}; + this._cachedProjectId = opts.projectId || null; + this.cachedCredential = opts.authClient || null; + this.keyFilename = opts.keyFilename || opts.keyFile; + this.scopes = opts.scopes; + this.clientOptions = opts.clientOptions || {}; + this.jsonContent = opts.credentials || null; + this.apiKey = opts.apiKey || this.clientOptions.apiKey || null; + // Cannot use both API Key + Credentials + if (this.apiKey && (this.jsonContent || this.clientOptions.credentials)) { + throw new RangeError(exports.GoogleAuthExceptionMessages.API_KEY_WITH_CREDENTIALS); + } + if (opts.universeDomain) { + this.clientOptions.universeDomain = opts.universeDomain; + } + } + // GAPIC client libraries should always use self-signed JWTs. The following + // variables are set on the JWT client in order to indicate the type of library, + // and sign the JWT with the correct audience and scopes (if not supplied). + setGapicJWTValues(client) { + client.defaultServicePath = this.defaultServicePath; + client.useJWTAccessWithScope = this.useJWTAccessWithScope; + client.defaultScopes = this.defaultScopes; + } + getProjectId(callback) { + if (callback) { + this.getProjectIdAsync().then(r => callback(null, r), callback); + } + else { + return this.getProjectIdAsync(); + } + } + /** + * A temporary method for internal `getProjectId` usages where `null` is + * acceptable. In a future major release, `getProjectId` should return `null` + * (as the `Promise` base signature describes) and this private + * method should be removed. + * + * @returns Promise that resolves with project id (or `null`) + */ + async getProjectIdOptional() { + try { + return await this.getProjectId(); + } + catch (e) { + if (e instanceof Error && + e.message === exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { + return null; + } + else { + throw e; + } + } + } + /** + * A private method for finding and caching a projectId. 
+ * + * Supports environments in order of precedence: + * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable + * - GOOGLE_APPLICATION_CREDENTIALS JSON file + * - Cloud SDK: `gcloud config config-helper --format json` + * - GCE project ID from metadata server + * + * @returns projectId + */ + async findAndCacheProjectId() { + let projectId = null; + projectId || (projectId = await this.getProductionProjectId()); + projectId || (projectId = await this.getFileProjectId()); + projectId || (projectId = await this.getDefaultServiceProjectId()); + projectId || (projectId = await this.getGCEProjectId()); + projectId || (projectId = await this.getExternalAccountClientProjectId()); + if (projectId) { + this._cachedProjectId = projectId; + return projectId; + } + else { + throw new Error(exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); + } + } + async getProjectIdAsync() { + if (this._cachedProjectId) { + return this._cachedProjectId; + } + if (!this._findProjectIdPromise) { + this._findProjectIdPromise = this.findAndCacheProjectId(); + } + return this._findProjectIdPromise; + } + /** + * Retrieves a universe domain from the metadata server via + * {@link gcpMetadata.universe}. + * + * @returns a universe domain + */ + async getUniverseDomainFromMetadataServer() { + var _a; + let universeDomain; + try { + universeDomain = await gcpMetadata.universe('universe-domain'); + universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + catch (e) { + if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { + universeDomain = authclient_1.DEFAULT_UNIVERSE; + } + else { + throw e; + } + } + return universeDomain; + } + /** + * Retrieves, caches, and returns the universe domain in the following order + * of precedence: + * - The universe domain in {@link GoogleAuth.clientOptions} + * - An existing or ADC {@link AuthClient}'s universe domain + * - {@link gcpMetadata.universe}, if {@link Compute} client + * + * @returns The universe domain + */ + async getUniverseDomain() { + let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); + try { + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); + } + catch (_a) { + // client or ADC is not available + universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); + } + return universeDomain; + } + /** + * @returns Any scopes (user-specified or default scopes specified by the + * client library) that need to be set on the current Auth client. + */ + getAnyScopes() { + return this.scopes || this.defaultScopes; + } + getApplicationDefault(optionsOrCallback = {}, callback) { + let options; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); + } + else { + return this.getApplicationDefaultAsync(options); + } + } + async getApplicationDefaultAsync(options = {}) { + // If we've already got a cached credential, return it. + // This will also preserve one's configured quota project, in case they + // set one directly on the credential previously. 
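+ // Editorial note, not part of the upstream source: the lookup below resolves
+ // Application Default Credentials in this order: any already cached credential,
+ // the file pointed to by GOOGLE_APPLICATION_CREDENTIALS, the gcloud well-known
+ // file (application_default_credentials.json under %APPDATA% on Windows or
+ // ~/.config/gcloud elsewhere), and finally the Compute Engine credential when
+ // the metadata server is reachable.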
+ if (this.cachedCredential) { + // cache, while preserving existing quota project preferences + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, this.cachedCredential, null); + } + let credential; + // Check for the existence of a local environment variable pointing to the + // location of the credential file. This is typically used in local + // developer scenarios. + credential = + await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Look in the well-known credential file location. + credential = + await this._tryGetApplicationCredentialsFromWellKnownFile(options); + if (credential) { + if (credential instanceof jwtclient_1.JWT) { + credential.scopes = this.scopes; + } + else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { + credential.scopes = this.getAnyScopes(); + } + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); + } + // Determine if we're running on GCE. + if (await this._checkIsGCE()) { + options.scopes = this.getAnyScopes(); + return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, new computeclient_1.Compute(options)); + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_ADC_FOUND); + } + /** + * Determines whether the auth layer is running on Google Compute Engine. + * Checks for GCP Residency, then fallback to checking if metadata server + * is available. + * + * @returns A promise that resolves with the boolean. + * @api private + */ + async _checkIsGCE() { + if (this.checkIsGCE === undefined) { + this.checkIsGCE = + gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); + } + return this.checkIsGCE; + } + /** + * Attempts to load default credentials from the environment variable path.. + * @returns Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { + const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || + process.env['google_application_credentials']; + if (!credentialsPath || credentialsPath.length === 0) { + return null; + } + try { + return this._getApplicationCredentialsFromFilePath(credentialsPath, options); + } + catch (e) { + if (e instanceof Error) { + e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; + } + throw e; + } + } + /** + * Attempts to load default credentials from a well-known file location + * @return Promise that resolves with the OAuth2Client or null. + * @api private + */ + async _tryGetApplicationCredentialsFromWellKnownFile(options) { + // First, figure out the location of the file, depending upon the OS type. + let location = null; + if (this._isWindows()) { + // Windows + location = process.env['APPDATA']; + } + else { + // Linux or Mac + const home = process.env['HOME']; + if (home) { + location = path.join(home, '.config'); + } + } + // If we found the root path, expand it. 
+ if (location) {
+ location = path.join(location, 'gcloud', 'application_default_credentials.json');
+ if (!fs.existsSync(location)) {
+ location = null;
+ }
+ }
+ // The file does not exist.
+ if (!location) {
+ return null;
+ }
+ // The file seems to exist. Try to use it.
+ const client = await this._getApplicationCredentialsFromFilePath(location, options);
+ return client;
+ }
+ /**
+ * Attempts to load default credentials from a file at the given path.
+ * @param filePath The path to the file to read.
+ * @returns Promise that resolves with the OAuth2Client
+ * @api private
+ */
+ async _getApplicationCredentialsFromFilePath(filePath, options = {}) {
+ // Make sure the path looks like a string.
+ if (!filePath || filePath.length === 0) {
+ throw new Error('The file path is invalid.');
+ }
+ // Make sure there is a file at the path. lstatSync will throw if there is
+ // nothing there.
+ try {
+ // Resolve path to actual file in case of symlink. Expect a thrown error
+ // if not resolvable.
+ filePath = fs.realpathSync(filePath);
+ if (!fs.lstatSync(filePath).isFile()) {
+ throw new Error();
+ }
+ }
+ catch (err) {
+ if (err instanceof Error) {
+ err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`;
+ }
+ throw err;
+ }
+ // Now open a read stream on the file, and parse it.
+ const readStream = fs.createReadStream(filePath);
+ return this.fromStream(readStream, options);
+ }
+ /**
+ * Create a credentials instance using a given impersonated input options.
+ * @param json The impersonated input object.
+ * @returns JWT or UserRefresh Client with data
+ */
+ fromImpersonatedJSON(json) {
+ var _a, _b, _c, _d;
+ if (!json) {
+ throw new Error('Must pass in a JSON object containing an impersonated refresh token');
+ }
+ if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) {
+ throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`);
+ }
+ if (!json.source_credentials) {
+ throw new Error('The incoming JSON object does not contain a source_credentials field');
+ }
+ if (!json.service_account_impersonation_url) {
+ throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field');
+ }
+ const sourceClient = this.fromJSON(json.source_credentials);
+ if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) {
+ /**
+ * Prevents DOS attacks.
+ * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85}
+ **/
+ throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`);
+ }
+ // Extract service account from service_account_impersonation_url
+ const targetPrincipal = (_c = (_b = /(?<target>[^/]+):(generateAccessToken|generateIdToken)$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target;
+ if (!targetPrincipal) {
+ throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`);
+ }
+ const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : [];
+ return new impersonated_1.Impersonated({
+ ...json,
+ sourceClient,
+ targetPrincipal,
+ targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes],
+ });
+ }
+ /**
+ * Create a credentials instance using the given input options.
+ * This client is not cached.
+ * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + * + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + fromJSON(json, options = {}) { + let client; + // user's preferred universe domain + const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { + client = new refreshclient_1.UserRefreshClient(options); + client.fromJSON(json); + } + else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { + client = this.fromImpersonatedJSON(json); + } + else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + client = externalclient_1.ExternalAccountClient.fromJSON(json, options); + client.scopes = this.getAnyScopes(); + } + else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { + client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); + } + else { + options.scopes = this.scopes; + client = new jwtclient_1.JWT(options); + this.setGapicJWTValues(client); + client.fromJSON(json); + } + if (preferredUniverseDomain) { + client.universeDomain = preferredUniverseDomain; + } + return client; + } + /** + * Return a JWT or UserRefreshClient from JavaScript object, caching both the + * object used to instantiate and the client. + * @param json The input object. + * @param options The JWT or UserRefresh options for the client + * @returns JWT or UserRefresh Client with data + */ + _cacheClientFromJSON(json, options) { + const client = this.fromJSON(json, options); + // cache both raw data used to instantiate client and client itself. 
+ this.jsonContent = json; + this.cachedCredential = client; + return client; + } + fromStream(inputStream, optionsOrCallback = {}, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else { + options = optionsOrCallback; + } + if (callback) { + this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); + } + else { + return this.fromStreamAsync(inputStream, options); + } + } + fromStreamAsync(inputStream, options) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the Google auth settings.'); + } + const chunks = []; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => chunks.push(chunk)) + .on('end', () => { + try { + try { + const data = JSON.parse(chunks.join('')); + const r = this._cacheClientFromJSON(data, options); + return resolve(r); + } + catch (err) { + // If we failed parsing this.keyFileName, assume that it + // is a PEM or p12 certificate: + if (!this.keyFilename) + throw err; + const client = new jwtclient_1.JWT({ + ...this.clientOptions, + keyFile: this.keyFilename, + }); + this.cachedCredential = client; + this.setGapicJWTValues(client); + return resolve(client); + } + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a credentials instance using the given API key string. + * The created client is not cached. In order to create and cache it use the {@link GoogleAuth.getClient `getClient`} method after first providing an {@link GoogleAuth.apiKey `apiKey`}. + * + * @param apiKey The API key string + * @param options An optional options object. + * @returns A JWT loaded from the key + */ + fromAPIKey(apiKey, options = {}) { + return new jwtclient_1.JWT({ ...options, apiKey }); + } + /** + * Determines whether the current operating system is Windows. + * @api private + */ + _isWindows() { + const sys = os.platform(); + if (sys && sys.length >= 3) { + if (sys.substring(0, 3).toLowerCase() === 'win') { + return true; + } + } + return false; + } + /** + * Run the Google Cloud SDK command that prints the default project ID + */ + async getDefaultServiceProjectId() { + return new Promise(resolve => { + (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { + if (!err && stdout) { + try { + const projectId = JSON.parse(stdout).configuration.properties.core.project; + resolve(projectId); + return; + } + catch (e) { + // ignore errors + } + } + resolve(null); + }); + }); + } + /** + * Loads the project id from environment variables. + * @api private + */ + getProductionProjectId() { + return (process.env['GCLOUD_PROJECT'] || + process.env['GOOGLE_CLOUD_PROJECT'] || + process.env['gcloud_project'] || + process.env['google_cloud_project']); + } + /** + * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. + * @api private + */ + async getFileProjectId() { + if (this.cachedCredential) { + // Try to read the project ID from the cached credentials file + return this.cachedCredential.projectId; + } + // Ensure the projectId is loaded from the keyFile if available. 
+ if (this.keyFilename) { + const creds = await this.getClient(); + if (creds && creds.projectId) { + return creds.projectId; + } + } + // Try to load a credentials file and read its project ID + const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); + if (r) { + return r.projectId; + } + else { + return null; + } + } + /** + * Gets the project ID from external account client if available. + */ + async getExternalAccountClientProjectId() { + if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { + return null; + } + const creds = await this.getClient(); + // Do not suppress the underlying error, as the error could contain helpful + // information for debugging and fixing. This is especially true for + // external account creds as in order to get the project ID, the following + // operations have to succeed: + // 1. Valid credentials file should be supplied. + // 2. Ability to retrieve access tokens from STS token exchange API. + // 3. Ability to exchange for service account impersonated credentials (if + // enabled). + // 4. Ability to get project info using the access token from step 2 or 3. + // Without surfacing the error, it is harder for developers to determine + // which step went wrong. + return await creds.getProjectId(); + } + /** + * Gets the Compute Engine project ID if it can be inferred. + */ + async getGCEProjectId() { + try { + const r = await gcpMetadata.project('project-id'); + return r; + } + catch (e) { + // Ignore any errors + return null; + } + } + getCredentials(callback) { + if (callback) { + this.getCredentialsAsync().then(r => callback(null, r), callback); + } + else { + return this.getCredentialsAsync(); + } + } + async getCredentialsAsync() { + const client = await this.getClient(); + if (client instanceof impersonated_1.Impersonated) { + return { client_email: client.getTargetPrincipal() }; + } + if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { + const serviceAccountEmail = client.getServiceAccountEmail(); + if (serviceAccountEmail) { + return { + client_email: serviceAccountEmail, + universe_domain: client.universeDomain, + }; + } + } + if (this.jsonContent) { + return { + client_email: this.jsonContent.client_email, + private_key: this.jsonContent.private_key, + universe_domain: this.jsonContent.universe_domain, + }; + } + if (await this._checkIsGCE()) { + const [client_email, universe_domain] = await Promise.all([ + gcpMetadata.instance('service-accounts/default/email'), + this.getUniverseDomain(), + ]); + return { client_email, universe_domain }; + } + throw new Error(exports.GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); + } + /** + * Automatically obtain an {@link AuthClient `AuthClient`} based on the + * provided configuration. If no options were passed, use Application + * Default Credentials. 
+ */ + async getClient() { + if (this.cachedCredential) { + return this.cachedCredential; + } + // Use an existing auth client request, or cache a new one + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f") || __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_determineClient).call(this), "f"); + try { + return await __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f"); + } + finally { + // reset the pending auth client in case it is changed later + __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, null, "f"); + } + } + /** + * Creates a client which will fetch an ID token for authorization. + * @param targetAudience the audience for the fetched ID token. + * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. + */ + async getIdTokenClient(targetAudience) { + const client = await this.getClient(); + if (!('fetchIdToken' in client)) { + throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); + } + return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); + } + /** + * Automatically obtain application default credentials, and return + * an access token for making requests. + */ + async getAccessToken() { + const client = await this.getClient(); + return (await client.getAccessToken()).token; + } + /** + * Obtain the HTTP headers that will provide authorization for a given + * request. + */ + async getRequestHeaders(url) { + const client = await this.getClient(); + return client.getRequestHeaders(url); + } + /** + * Obtain credentials for a request, then attach the appropriate headers to + * the request options. + * @param opts Axios or Request options on which to attach the headers + */ + async authorizeRequest(opts) { + opts = opts || {}; + const url = opts.url || opts.uri; + const client = await this.getClient(); + const headers = await client.getRequestHeaders(url); + opts.headers = Object.assign(opts.headers || {}, headers); + return opts; + } + /** + * Automatically obtain application default credentials, and make an + * HTTP request using the given options. + * @param opts Axios request options for the HTTP request. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async request(opts) { + const client = await this.getClient(); + return client.request(opts); + } + /** + * Determine the compute environment in which the code is running. + */ + getEnv() { + return (0, envDetect_1.getEnv)(); + } + /** + * Sign the given data with the current private key, or go out + * to the IAM API to sign it. + * @param data The data to be signed. + * @param endpoint A custom endpoint to use. 
+ * + * @example + * ``` + * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); + * ``` + */ + async sign(data, endpoint) { + const client = await this.getClient(); + const universe = await this.getUniverseDomain(); + endpoint = + endpoint || + `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; + if (client instanceof impersonated_1.Impersonated) { + const signed = await client.sign(data); + return signed.signedBlob; + } + const crypto = (0, crypto_1.createCrypto)(); + if (client instanceof jwtclient_1.JWT && client.key) { + const sign = await crypto.sign(client.key, data); + return sign; + } + const creds = await this.getCredentials(); + if (!creds.client_email) { + throw new Error('Cannot sign data without `client_email`.'); + } + return this.signBlob(crypto, creds.client_email, data, endpoint); + } + async signBlob(crypto, emailOrUniqueId, data, endpoint) { + const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); + const res = await this.request({ + method: 'POST', + url: url.href, + data: { + payload: crypto.encodeBase64StringUtf8(data), + }, + retry: true, + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + return res.data.signedBlob; + } +} +exports.GoogleAuth = GoogleAuth; +_GoogleAuth_pendingAuthClient = new WeakMap(), _GoogleAuth_instances = new WeakSet(), _GoogleAuth_prepareAndCacheClient = async function _GoogleAuth_prepareAndCacheClient(credential, quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'] || null) { + const projectId = await this.getProjectIdOptional(); + if (quotaProjectIdOverride) { + credential.quotaProjectId = quotaProjectIdOverride; + } + this.cachedCredential = credential; + return { credential, projectId }; +}, _GoogleAuth_determineClient = async function _GoogleAuth_determineClient() { + if (this.jsonContent) { + return this._cacheClientFromJSON(this.jsonContent, this.clientOptions); + } + else if (this.keyFilename) { + const filePath = path.resolve(this.keyFilename); + const stream = fs.createReadStream(filePath); + return await this.fromStreamAsync(stream, this.clientOptions); + } + else if (this.apiKey) { + const client = await this.fromAPIKey(this.apiKey, this.clientOptions); + client.scopes = this.scopes; + const { credential } = await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, client); + return credential; + } + else { + const { credential } = await this.getApplicationDefaultAsync(this.clientOptions); + return credential; + } +}; +/** + * Export DefaultTransporter as a static property of the class. + */ +GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; + + +/***/ }), + +/***/ 7009: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IAMAuth = void 0; +class IAMAuth { + /** + * IAM credentials. 
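+     * Wraps a pre-obtained IAM authority selector and authorization token and
+     * exposes them as request headers; no token exchange is performed here.
+     *
+     * @example
+     * A brief sketch (the selector and token values are illustrative):
+     * ```
+     * const iam = new IAMAuth('my-authority-selector', 'my-authorization-token');
+     * const headers = iam.getRequestHeaders();
+     * // { 'x-goog-iam-authority-selector': ..., 'x-goog-iam-authorization-token': ... }
+     * ```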
+ * + * @param selector the iam authority selector + * @param token the token + * @constructor + */ + constructor(selector, token) { + this.selector = selector; + this.token = token; + this.selector = selector; + this.token = token; + } + /** + * Acquire the HTTP headers required to make an authenticated request. + */ + getRequestHeaders() { + return { + 'x-goog-iam-authority-selector': this.selector, + 'x-goog-iam-authorization-token': this.token, + }; + } +} +exports.IAMAuth = IAMAuth; + + +/***/ }), + +/***/ 9960: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdentityPoolClient = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const util_1 = __nccwpck_require__(7870); +const filesubjecttokensupplier_1 = __nccwpck_require__(932); +const urlsubjecttokensupplier_1 = __nccwpck_require__(4627); +/** + * Defines the Url-sourced and file-sourced external account clients mainly + * used for K8s and Azure workloads. + */ +class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiate an IdentityPoolClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid file-sourced or + * url-sourced credential or a workforce pool user project is provided + * with a non workforce audience. + * @param options The external account options object typically loaded + * from the external account JSON credential file. The camelCased options + * are aliases for the snake_cased options. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + const opts = (0, util_1.originalOrCamelOptions)(options); + const credentialSource = opts.get('credential_source'); + const subjectTokenSupplier = opts.get('subject_token_supplier'); + // Validate credential sourcing configuration. + if (!credentialSource && !subjectTokenSupplier) { + throw new Error('A credential source or subject token supplier must be specified.'); + } + if (credentialSource && subjectTokenSupplier) { + throw new Error('Only one of credential source or subject token supplier can be specified.'); + } + if (subjectTokenSupplier) { + this.subjectTokenSupplier = subjectTokenSupplier; + this.credentialSourceType = 'programmatic'; + } + else { + const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); + const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); + // Text is the default format type. 
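+            // For example (shapes shown for illustration only), a file-sourced
+            // credential_source might look like
+            //   { "file": "/path/to/token.txt", "format": { "type": "text" } }
+            // while a url-sourced one supplies "url" (plus optional "headers")
+            // instead of "file".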
+ const formatType = formatOpts.get('type') || 'text'; + const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); + if (formatType !== 'json' && formatType !== 'text') { + throw new Error(`Invalid credential_source format "${formatType}"`); + } + if (formatType === 'json' && !formatSubjectTokenFieldName) { + throw new Error('Missing subject_token_field_name for JSON credential_source format'); + } + const file = credentialSourceOpts.get('file'); + const url = credentialSourceOpts.get('url'); + const headers = credentialSourceOpts.get('headers'); + if (file && url) { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + else if (file && !url) { + this.credentialSourceType = 'file'; + this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ + filePath: file, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + }); + } + else if (!file && url) { + this.credentialSourceType = 'url'; + this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ + url: url, + formatType: formatType, + subjectTokenFieldName: formatSubjectTokenFieldName, + headers: headers, + additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, + }); + } + else { + throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); + } + } + } + /** + * Triggered when a external subject token is needed to be exchanged for a GCP + * access token via GCP STS endpoint. Gets a subject token by calling + * the configured {@link SubjectTokenSupplier} + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); + } +} +exports.IdentityPoolClient = IdentityPoolClient; + + +/***/ }), + +/***/ 2718: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.IdTokenClient = void 0; +const oauth2client_1 = __nccwpck_require__(91); +class IdTokenClient extends oauth2client_1.OAuth2Client { + /** + * Google ID Token client + * + * Retrieve ID token from the metadata server. 
+ * See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server + */ + constructor(options) { + super(options); + this.targetAudience = options.targetAudience; + this.idTokenProvider = options.idTokenProvider; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + if (!this.credentials.id_token || + !this.credentials.expiry_date || + this.isTokenExpiring()) { + const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); + this.credentials = { + id_token: idToken, + expiry_date: this.getIdTokenExpiryDate(idToken), + }; + } + const headers = { + Authorization: 'Bearer ' + this.credentials.id_token, + }; + return { headers }; + } + getIdTokenExpiryDate(idToken) { + const payloadB64 = idToken.split('.')[1]; + if (payloadB64) { + const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); + return payload.exp * 1000; + } + } +} +exports.IdTokenClient = IdTokenClient; + + +/***/ }), + +/***/ 9964: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const gaxios_1 = __nccwpck_require__(7003); +const util_1 = __nccwpck_require__(7870); +exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; +class Impersonated extends oauth2client_1.OAuth2Client { + /** + * Impersonated service account credentials. + * + * Create a new access token by impersonating another service account. + * + * Impersonated Credentials allowing credentials issued to a user or + * service account to impersonate another. The source project using + * Impersonated Credentials must enable the "IAMCredentials" API. + * Also, the target service account must grant the orginating principal + * the "Service Account Token Creator" IAM role. + * + * @param {object} options - The configuration object. + * @param {object} [options.sourceClient] the source credential used as to + * acquire the impersonated credentials. + * @param {string} [options.targetPrincipal] the service account to + * impersonate. + * @param {string[]} [options.delegates] the chained list of delegates + * required to grant the final access_token. If set, the sequence of + * identities must have "Service Account Token Creator" capability granted to + * the preceding identity. For example, if set to [serviceAccountB, + * serviceAccountC], the sourceCredential must have the Token Creator role on + * serviceAccountB. serviceAccountB must have the Token Creator on + * serviceAccountC. Finally, C must have Token Creator on target_principal. + * If left unset, sourceCredential must have that role on targetPrincipal. + * @param {string[]} [options.targetScopes] scopes to request during the + * authorization grant. 
+ * @param {number} [options.lifetime] number of seconds the delegated + * credential should be valid for up to 3600 seconds by default, or 43,200 + * seconds by extending the token's lifetime, see: + * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth + * @param {string} [options.endpoint] api endpoint override. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f; + super(options); + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { + expiry_date: 1, + refresh_token: 'impersonated-placeholder', + }; + this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); + this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; + this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; + this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; + this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; + const usingExplicitUniverseDomain = !!(0, util_1.originalOrCamelOptions)(options).get('universe_domain'); + if (!usingExplicitUniverseDomain) { + // override the default universe with the source's universe + this.universeDomain = this.sourceClient.universeDomain; + } + else if (this.sourceClient.universeDomain !== this.universeDomain) { + // non-default universe and is not matching the source - this could be a credential leak + throw new RangeError(`Universe domain ${this.sourceClient.universeDomain} in source credentials does not match ${this.universeDomain} universe domain set for impersonated credentials.`); + } + this.endpoint = + (_f = options.endpoint) !== null && _f !== void 0 ? _f : `https://iamcredentials.${this.universeDomain}`; + } + /** + * Signs some bytes. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} + * @param blobToSign String to sign. + * + * @returns A {@link SignBlobResponse} denoting the keyID and signedBlob in base64 string + */ + async sign(blobToSign) { + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:signBlob`; + const body = { + delegates: this.delegates, + payload: Buffer.from(blobToSign).toString('base64'), + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data; + } + /** The service account email to be impersonated. */ + getTargetPrincipal() { + return this.targetPrincipal; + } + /** + * Refreshes the access token. 
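+     * Exchanges the source credential's access token for an impersonated access
+     * token by POSTing to the IAM Credentials `generateAccessToken` endpoint for
+     * the configured target principal, delegates, scopes, and lifetime.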
+ */ + async refreshToken() { + var _a, _b, _c, _d, _e, _f; + try { + await this.sourceClient.getAccessToken(); + const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; + const u = `${this.endpoint}/v1/${name}:generateAccessToken`; + const body = { + delegates: this.delegates, + scope: this.targetScopes, + lifetime: this.lifetime + 's', + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + const tokenResponse = res.data; + this.credentials.access_token = tokenResponse.accessToken; + this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); + return { + tokens: this.credentials, + res, + }; + } + catch (error) { + if (!(error instanceof Error)) + throw error; + let status = 0; + let message = ''; + if (error instanceof gaxios_1.GaxiosError) { + status = (_c = (_b = (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; + message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; + } + if (status && message) { + error.message = `${status}: unable to impersonate: ${message}`; + throw error; + } + else { + error.message = `unable to impersonate: ${error}`; + throw error; + } + } + } + /** + * Generates an OpenID Connect ID token for a service account. + * + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} + * + * @param targetAudience the audience for the fetched ID token. + * @param options the for the request + * @return an OpenID Connect ID token + */ + async fetchIdToken(targetAudience, options) { + var _a, _b; + await this.sourceClient.getAccessToken(); + const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; + const u = `${this.endpoint}/v1/${name}:generateIdToken`; + const body = { + delegates: this.delegates, + audience: targetAudience, + includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, + useEmailAzp: (_b = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _b !== void 0 ? _b : true, + }; + const res = await this.sourceClient.request({ + ...Impersonated.RETRY_CONFIG, + url: u, + data: body, + method: 'POST', + }); + return res.data.token; + } +} +exports.Impersonated = Impersonated; + + +/***/ }), + +/***/ 7060: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
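+// JWTAccess (below) builds self-signed JWTs that are presented directly as bearer
+// tokens, avoiding a round trip to the OAuth token endpoint. A minimal sketch,
+// assuming a service account email and private key are already loaded (all values
+// illustrative):
+//
+//   const access = new JWTAccess('sa@my-project.iam.gserviceaccount.com', privateKey);
+//   const headers = access.getRequestHeaders('https://example.googleapis.com/');
+//   // headers.Authorization === 'Bearer <signed JWT>'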
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWTAccess = void 0; +const jws = __nccwpck_require__(3324); +const util_1 = __nccwpck_require__(7870); +const DEFAULT_HEADER = { + alg: 'RS256', + typ: 'JWT', +}; +class JWTAccess { + /** + * JWTAccess service account credentials. + * + * Create a new access token by using the credential to create a new JWT token + * that's recognized as the access token. + * + * @param email the service account email address. + * @param key the private key that will be used to sign the token. + * @param keyId the ID of the private key used to sign the token. + */ + constructor(email, key, keyId, eagerRefreshThresholdMillis) { + this.cache = new util_1.LRUCache({ + capacity: 500, + maxAge: 60 * 60 * 1000, + }); + this.email = email; + this.key = key; + this.keyId = keyId; + this.eagerRefreshThresholdMillis = + eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; + } + /** + * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url + * + * @param url The URI being authorized. + * @param scopes The scope or scopes being authorized + * @returns A string that returns the cached key. + */ + getCachedKey(url, scopes) { + let cacheKey = url; + if (scopes && Array.isArray(scopes) && scopes.length) { + cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; + } + else if (typeof scopes === 'string') { + cacheKey = url ? `${url}_${scopes}` : scopes; + } + if (!cacheKey) { + throw Error('Scopes or url must be provided'); + } + return cacheKey; + } + /** + * Get a non-expired access token, after refreshing if necessary. + * + * @param url The URI being authorized. + * @param additionalClaims An object with a set of additional claims to + * include in the payload. + * @returns An object that includes the authorization header. + */ + getRequestHeaders(url, additionalClaims, scopes) { + // Return cached authorization headers, unless we are within + // eagerRefreshThresholdMillis ms of them expiring: + const key = this.getCachedKey(url, scopes); + const cachedToken = this.cache.get(key); + const now = Date.now(); + if (cachedToken && + cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { + return cachedToken.headers; + } + const iat = Math.floor(Date.now() / 1000); + const exp = JWTAccess.getExpirationTime(iat); + let defaultClaims; + // Turn scopes into space-separated string + if (Array.isArray(scopes)) { + scopes = scopes.join(' '); + } + // If scopes are specified, sign with scopes + if (scopes) { + defaultClaims = { + iss: this.email, + sub: this.email, + scope: scopes, + exp, + iat, + }; + } + else { + defaultClaims = { + iss: this.email, + sub: this.email, + aud: url, + exp, + iat, + }; + } + // if additionalClaims are provided, ensure they do not collide with + // other required claims. + if (additionalClaims) { + for (const claim in defaultClaims) { + if (additionalClaims[claim]) { + throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); + } + } + } + const header = this.keyId + ? 
{ ...DEFAULT_HEADER, kid: this.keyId } + : DEFAULT_HEADER; + const payload = Object.assign(defaultClaims, additionalClaims); + // Sign the jwt and add it to the cache + const signedJWT = jws.sign({ header, payload, secret: this.key }); + const headers = { Authorization: `Bearer ${signedJWT}` }; + this.cache.set(key, { + expiration: exp * 1000, + headers, + }); + return headers; + } + /** + * Returns an expiration time for the JWT token. + * + * @param iat The issued at time for the JWT. + * @returns An expiration time for the JWT. + */ + static getExpirationTime(iat) { + const exp = iat + 3600; // 3600 seconds = 1 hour + return exp; + } + /** + * Create a JWTAccess credentials instance using the given input options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. + this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + reject(new Error('Must pass in a stream containing the service account auth settings.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('data', chunk => (s += chunk)) + .on('error', reject) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (err) { + reject(err); + } + }); + }); + } +} +exports.JWTAccess = JWTAccess; + + +/***/ }), + +/***/ 5277: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2013 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.JWT = void 0; +const gtoken_1 = __nccwpck_require__(8568); +const jwtaccess_1 = __nccwpck_require__(7060); +const oauth2client_1 = __nccwpck_require__(91); +const authclient_1 = __nccwpck_require__(4810); +class JWT extends oauth2client_1.OAuth2Client { + constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { + const opts = optionsOrEmail && typeof optionsOrEmail === 'object' + ? 
optionsOrEmail + : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; + super(opts); + this.email = opts.email; + this.keyFile = opts.keyFile; + this.key = opts.key; + this.keyId = opts.keyId; + this.scopes = opts.scopes; + this.subject = opts.subject; + this.additionalClaims = opts.additionalClaims; + // Start with an expired refresh token, which will automatically be + // refreshed before the first API call is made. + this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; + } + /** + * Creates a copy of the credential with the specified scopes. + * @param scopes List of requested scopes or a single scope. + * @return The cloned instance. + */ + createScoped(scopes) { + const jwt = new JWT(this); + jwt.scopes = scopes; + return jwt; + } + /** + * Obtains the metadata to be sent with the request. + * + * @param url the URI being authorized. + */ + async getRequestMetadataAsync(url) { + url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; + const useSelfSignedJWT = (!this.hasUserScopes() && url) || + (this.useJWTAccessWithScope && this.hasAnyScopes()) || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { + throw new RangeError(`Service Account user is configured for the credential. Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); + } + if (!this.apiKey && useSelfSignedJWT) { + if (this.additionalClaims && + this.additionalClaims.target_audience) { + const { tokens } = await this.refreshToken(); + return { + headers: this.addSharedMetadataHeaders({ + Authorization: `Bearer ${tokens.id_token}`, + }), + }; + } + else { + // no scopes have been set, but a uri has been provided. Use JWTAccess + // credentials. + if (!this.access) { + this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); + } + let scopes; + if (this.hasUserScopes()) { + scopes = this.scopes; + } + else if (!url) { + scopes = this.defaultScopes; + } + const useScopes = this.useJWTAccessWithScope || + this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; + const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, + // Scopes take precedent over audience for signing, + // so we only provide them if `useJWTAccessWithScope` is on or + // if we are in a non-default universe + useScopes ? scopes : undefined); + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + else if (this.hasAnyScopes() || this.apiKey) { + return super.getRequestMetadataAsync(url); + } + else { + // If no audience, apiKey, or scopes are provided, we should not attempt + // to populate any headers: + return { headers: {} }; + } + } + /** + * Fetches an ID token. + * @param targetAudience the audience for the fetched ID token. + */ + async fetchIdToken(targetAudience) { + // Create a new gToken for fetching an ID token + const gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: { target_audience: targetAudience }, + transporter: this.transporter, + }); + await gtoken.getToken({ + forceRefresh: true, + }); + if (!gtoken.idToken) { + throw new Error('Unknown error: Failed to fetch ID token'); + } + return gtoken.idToken; + } + /** + * Determine if there are currently scopes available. 
+ */ + hasUserScopes() { + if (!this.scopes) { + return false; + } + return this.scopes.length > 0; + } + /** + * Are there any default or user scopes defined. + */ + hasAnyScopes() { + if (this.scopes && this.scopes.length > 0) + return true; + if (this.defaultScopes && this.defaultScopes.length > 0) + return true; + return false; + } + authorize(callback) { + if (callback) { + this.authorizeAsync().then(r => callback(null, r), callback); + } + else { + return this.authorizeAsync(); + } + } + async authorizeAsync() { + const result = await this.refreshToken(); + if (!result) { + throw new Error('No result returned'); + } + this.credentials = result.tokens; + this.credentials.refresh_token = 'jwt-placeholder'; + this.key = this.gtoken.key; + this.email = this.gtoken.iss; + return result.tokens; + } + /** + * Refreshes the access token. + * @param refreshToken ignored + * @private + */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + const gtoken = this.createGToken(); + const token = await gtoken.getToken({ + forceRefresh: this.isTokenExpiring(), + }); + const tokens = { + access_token: token.access_token, + token_type: 'Bearer', + expiry_date: gtoken.expiresAt, + id_token: gtoken.idToken, + }; + this.emit('tokens', tokens); + return { res: null, tokens }; + } + /** + * Create a gToken if it doesn't already exist. + */ + createGToken() { + if (!this.gtoken) { + this.gtoken = new gtoken_1.GoogleToken({ + iss: this.email, + sub: this.subject, + scope: this.scopes || this.defaultScopes, + keyFile: this.keyFile, + key: this.key, + additionalClaims: this.additionalClaims, + transporter: this.transporter, + }); + } + return this.gtoken; + } + /** + * Create a JWT credentials instance using the given input options. + * @param json The input object. + * + * @remarks + * + * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the service account auth settings.'); + } + if (!json.client_email) { + throw new Error('The incoming JSON object does not contain a client_email field'); + } + if (!json.private_key) { + throw new Error('The incoming JSON object does not contain a private_key field'); + } + // Extract the relevant information from the json key file. 
+ this.email = json.client_email; + this.key = json.private_key; + this.keyId = json.private_key_id; + this.projectId = json.project_id; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + throw new Error('Must pass in a stream containing the service account auth settings.'); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + resolve(); + } + catch (e) { + reject(e); + } + }); + }); + } + /** + * Creates a JWT credentials instance using an API Key for authentication. + * @param apiKey The API Key in string form. + */ + fromAPIKey(apiKey) { + if (typeof apiKey !== 'string') { + throw new Error('Must provide an API Key string.'); + } + this.apiKey = apiKey; + } + /** + * Using the key or keyFile on the JWT client, obtain an object that contains + * the key and the client email. + */ + async getCredentials() { + if (this.key) { + return { private_key: this.key, client_email: this.email }; + } + else if (this.keyFile) { + const gtoken = this.createGToken(); + const creds = await gtoken.getCredentials(this.keyFile); + return { private_key: creds.privateKey, client_email: creds.clientEmail }; + } + throw new Error('A key or a keyFile must be provided to getCredentials.'); + } +} +exports.JWT = JWT; + + +/***/ }), + +/***/ 3882: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2014 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LoginTicket = void 0; +class LoginTicket { + /** + * Create a simple class to extract user ID from an ID Token + * + * @param {string} env Envelope of the jwt + * @param {TokenPayload} pay Payload of the jwt + * @constructor + */ + constructor(env, pay) { + this.envelope = env; + this.payload = pay; + } + getEnvelope() { + return this.envelope; + } + getPayload() { + return this.payload; + } + /** + * Create a simple class to extract user ID from an ID Token + * + * @return The user ID + */ + getUserId() { + const payload = this.getPayload(); + if (payload && payload.sub) { + return payload.sub; + } + return null; + } + /** + * Returns attributes from the login ticket. This can contain + * various information about the user session. 
+ * + * @return The envelope and payload + */ + getAttributes() { + return { envelope: this.getEnvelope(), payload: this.getPayload() }; + } +} +exports.LoginTicket = LoginTicket; + + +/***/ }), + +/***/ 91: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuth2Client = exports.ClientAuthentication = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const stream = __nccwpck_require__(2203); +const formatEcdsa = __nccwpck_require__(325); +const crypto_1 = __nccwpck_require__(8851); +const authclient_1 = __nccwpck_require__(4810); +const loginticket_1 = __nccwpck_require__(3882); +var CodeChallengeMethod; +(function (CodeChallengeMethod) { + CodeChallengeMethod["Plain"] = "plain"; + CodeChallengeMethod["S256"] = "S256"; +})(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); +var CertificateFormat; +(function (CertificateFormat) { + CertificateFormat["PEM"] = "PEM"; + CertificateFormat["JWK"] = "JWK"; +})(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); +/** + * The client authentication type. Supported values are basic, post, and none. + * https://datatracker.ietf.org/doc/html/rfc7591#section-2 + */ +var ClientAuthentication; +(function (ClientAuthentication) { + ClientAuthentication["ClientSecretPost"] = "ClientSecretPost"; + ClientAuthentication["ClientSecretBasic"] = "ClientSecretBasic"; + ClientAuthentication["None"] = "None"; +})(ClientAuthentication || (exports.ClientAuthentication = ClientAuthentication = {})); +class OAuth2Client extends authclient_1.AuthClient { + constructor(optionsOrClientId, clientSecret, redirectUri) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? 
optionsOrClientId + : { clientId: optionsOrClientId, clientSecret, redirectUri }; + super(opts); + this.certificateCache = {}; + this.certificateExpiry = null; + this.certificateCacheFormat = CertificateFormat.PEM; + this.refreshTokenPromises = new Map(); + this._clientId = opts.clientId; + this._clientSecret = opts.clientSecret; + this.redirectUri = opts.redirectUri; + this.endpoints = { + tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', + oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', + oauth2TokenUrl: 'https://oauth2.googleapis.com/token', + oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', + oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', + oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', + oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', + ...opts.endpoints, + }; + this.clientAuthentication = + opts.clientAuthentication || ClientAuthentication.ClientSecretPost; + this.issuers = opts.issuers || [ + 'accounts.google.com', + 'https://accounts.google.com', + this.universeDomain, + ]; + } + /** + * Generates URL for consent page landing. + * @param opts Options. + * @return URL to consent page. + */ + generateAuthUrl(opts = {}) { + if (opts.code_challenge_method && !opts.code_challenge) { + throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); + } + opts.response_type = opts.response_type || 'code'; + opts.client_id = opts.client_id || this._clientId; + opts.redirect_uri = opts.redirect_uri || this.redirectUri; + // Allow scopes to be passed either as array or a string + if (Array.isArray(opts.scope)) { + opts.scope = opts.scope.join(' '); + } + const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); + return (rootUrl + + '?' + + querystring.stringify(opts)); + } + generateCodeVerifier() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); + } + /** + * Convenience method to automatically generate a code_verifier, and its + * resulting SHA256. If used, this must be paired with a S256 + * code_challenge_method. + * + * For a full example see: + * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js + */ + async generateCodeVerifierAsync() { + // base64 encoding uses 6 bits per character, and we want to generate128 + // characters. 6*128/8 = 96. + const crypto = (0, crypto_1.createCrypto)(); + const randomString = crypto.randomBytesBase64(96); + // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ + // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just + // swapping out a few chars. + const codeVerifier = randomString + .replace(/\+/g, '~') + .replace(/=/g, '_') + .replace(/\//g, '-'); + // Generate the base64 encoded SHA256 + const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); + // We need to use base64UrlEncoding instead of standard base64 + const codeChallenge = unencodedCodeChallenge + .split('=')[0] + .replace(/\+/g, '-') + .replace(/\//g, '_'); + return { codeVerifier, codeChallenge }; + } + getToken(codeOrOptions, callback) { + const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; + if (callback) { + this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); + } + else { + return this.getTokenAsync(options); + } + } + async getTokenAsync(options) { + const url = this.endpoints.oauth2TokenUrl.toString(); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const values = { + client_id: options.client_id || this._clientId, + code_verifier: options.codeVerifier, + code: options.code, + grant_type: 'authorization_code', + redirect_uri: options.redirect_uri || this.redirectUri, + }; + if (this.clientAuthentication === ClientAuthentication.ClientSecretBasic) { + const basic = Buffer.from(`${this._clientId}:${this._clientSecret}`); + headers['Authorization'] = `Basic ${basic.toString('base64')}`; + } + if (this.clientAuthentication === ClientAuthentication.ClientSecretPost) { + values.client_secret = this._clientSecret; + } + const res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(values), + headers, + }); + const tokens = res.data; + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + /** + * Refreshes the access token. + * @param refresh_token Existing refresh token. + * @private + */ + async refreshToken(refreshToken) { + if (!refreshToken) { + return this.refreshTokenNoCache(refreshToken); + } + // If a request to refresh using the same token has started, + // return the same promise. + if (this.refreshTokenPromises.has(refreshToken)) { + return this.refreshTokenPromises.get(refreshToken); + } + const p = this.refreshTokenNoCache(refreshToken).then(r => { + this.refreshTokenPromises.delete(refreshToken); + return r; + }, e => { + this.refreshTokenPromises.delete(refreshToken); + throw e; + }); + this.refreshTokenPromises.set(refreshToken, p); + return p; + } + async refreshTokenNoCache(refreshToken) { + var _a; + if (!refreshToken) { + throw new Error('No refresh token is set.'); + } + const url = this.endpoints.oauth2TokenUrl.toString(); + const data = { + refresh_token: refreshToken, + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + }; + let res; + try { + // request for new token + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + url, + data: querystring.stringify(data), + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }); + } + catch (e) { + if (e instanceof gaxios_1.GaxiosError && + e.message === 'invalid_grant' && + ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && + /ReAuth/i.test(e.response.data.error_description)) { + e.message = JSON.stringify(e.response.data); + } + throw e; + } + const tokens = res.data; + // TODO: de-duplicate this code from a few spots + if (res.data && res.data.expires_in) { + tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; + delete tokens.expires_in; + } + this.emit('tokens', tokens); + return { tokens, res }; + } + refreshAccessToken(callback) { + if (callback) { + this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); + } + else { + return this.refreshAccessTokenAsync(); + } + } + async refreshAccessTokenAsync() { + const r = await this.refreshToken(this.credentials.refresh_token); + const tokens = r.tokens; + tokens.refresh_token = this.credentials.refresh_token; + this.credentials = tokens; + return { credentials: this.credentials, res: r.res }; + } + getAccessToken(callback) { + if (callback) { + this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); + } + else { + return this.getAccessTokenAsync(); + } + } + async getAccessTokenAsync() { + const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); + if (shouldRefresh) { + if (!this.credentials.refresh_token) { + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + return { token: this.credentials.access_token }; + } + } + else { + throw new Error('No refresh token or refresh handler callback is set.'); + } + } + const r = await this.refreshAccessTokenAsync(); + if (!r.credentials || (r.credentials && !r.credentials.access_token)) { + throw new Error('Could not refresh access token.'); + } + return { token: r.credentials.access_token, res: r.res }; + } + else { + return { token: this.credentials.access_token }; + } + } + /** + * The main authentication interface. It takes an optional url which when + * present is the endpoint being accessed, and returns a Promise which + * resolves with authorization header fields. + * + * In OAuth2Client, the result has the form: + * { Authorization: 'Bearer ' } + * @param url The optional url being authorized + */ + async getRequestHeaders(url) { + const headers = (await this.getRequestMetadataAsync(url)).headers; + return headers; + } + async getRequestMetadataAsync( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + url) { + const thisCreds = this.credentials; + if (!thisCreds.access_token && + !thisCreds.refresh_token && + !this.apiKey && + !this.refreshHandler) { + throw new Error('No access, refresh token, API key or refresh handler callback is set.'); + } + if (thisCreds.access_token && !this.isTokenExpiring()) { + thisCreds.token_type = thisCreds.token_type || 'Bearer'; + const headers = { + Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + // If refreshHandler exists, call processAndValidateRefreshHandler(). + if (this.refreshHandler) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + const headers = { + Authorization: 'Bearer ' + this.credentials.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers) }; + } + } + if (this.apiKey) { + return { headers: { 'X-Goog-Api-Key': this.apiKey } }; + } + let r = null; + let tokens = null; + try { + r = await this.refreshToken(thisCreds.refresh_token); + tokens = r.tokens; + } + catch (err) { + const e = err; + if (e.response && + (e.response.status === 403 || e.response.status === 404)) { + e.message = `Could not refresh access token: ${e.message}`; + } + throw e; + } + const credentials = this.credentials; + credentials.token_type = credentials.token_type || 'Bearer'; + tokens.refresh_token = credentials.refresh_token; + this.credentials = tokens; + const headers = { + Authorization: credentials.token_type + ' ' + tokens.access_token, + }; + return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; + } + /** + * Generates an URL to revoke the given token. + * @param token The existing token to be revoked. + * + * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} + */ + static getRevokeTokenUrl(token) { + return new OAuth2Client().getRevokeTokenURL(token).toString(); + } + /** + * Generates a URL to revoke the given token. + * + * @param token The existing token to be revoked. + */ + getRevokeTokenURL(token) { + const url = new URL(this.endpoints.oauth2RevokeUrl); + url.searchParams.append('token', token); + return url; + } + revokeToken(token, callback) { + const opts = { + ...OAuth2Client.RETRY_CONFIG, + url: this.getRevokeTokenURL(token).toString(), + method: 'POST', + }; + if (callback) { + this.transporter + .request(opts) + .then(r => callback(null, r), callback); + } + else { + return this.transporter.request(opts); + } + } + revokeCredentials(callback) { + if (callback) { + this.revokeCredentialsAsync().then(res => callback(null, res), callback); + } + else { + return this.revokeCredentialsAsync(); + } + } + async revokeCredentialsAsync() { + const token = this.credentials.access_token; + this.credentials = {}; + if (token) { + return this.revokeToken(token); + } + else { + throw new Error('No access token to revoke.'); + } + } + request(opts, callback) { + if (callback) { + this.requestAsync(opts).then(r => callback(null, r), e => { + return callback(e, e.response); + }); + } + else { + return this.requestAsync(opts); + } + } + async requestAsync(opts, reAuthRetried = false) { + let r2; + try { + const r = await this.getRequestMetadataAsync(opts.url); + opts.headers = opts.headers || {}; + if (r.headers && r.headers['x-goog-user-project']) { + opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; + } + if (r.headers && r.headers.Authorization) { + opts.headers.Authorization = r.headers.Authorization; + } + if (this.apiKey) { + opts.headers['X-Goog-Api-Key'] = this.apiKey; + } + r2 = await this.transporter.request(opts); + } + catch (e) { + const res = e.response; + if (res) { + const statusCode = res.status; + // Retry the request for metadata if the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - An access_token and refresh_token were available, but either no + // expiry_date was available or the forceRefreshOnFailure flag is set. 
+ // The absent expiry_date case can happen when developers stash the + // access_token and refresh_token for later use, but the access_token + // fails on the first try because it's expired. Some developers may + // choose to enable forceRefreshOnFailure to mitigate time-related + // errors. + // Or the following criteria are true: + // - We haven't already retried. It only makes sense to retry once. + // - The response was a 401 or a 403 + // - The request didn't send a readableStream + // - No refresh_token was available + // - An access_token and a refreshHandler callback were available, but + // either no expiry_date was available or the forceRefreshOnFailure + // flag is set. The access_token fails on the first try because it's + // expired. Some developers may choose to enable forceRefreshOnFailure + // to mitigate time-related errors. + const mayRequireRefresh = this.credentials && + this.credentials.access_token && + this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure); + const mayRequireRefreshWithNoRefreshToken = this.credentials && + this.credentials.access_token && + !this.credentials.refresh_token && + (!this.credentials.expiry_date || this.forceRefreshOnFailure) && + this.refreshHandler; + const isReadableStream = res.config.data instanceof stream.Readable; + const isAuthErr = statusCode === 401 || statusCode === 403; + if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefresh) { + await this.refreshAccessTokenAsync(); + return this.requestAsync(opts, true); + } + else if (!reAuthRetried && + isAuthErr && + !isReadableStream && + mayRequireRefreshWithNoRefreshToken) { + const refreshedAccessToken = await this.processAndValidateRefreshHandler(); + if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { + this.setCredentials(refreshedAccessToken); + } + return this.requestAsync(opts, true); + } + } + throw e; + } + return r2; + } + verifyIdToken(options, callback) { + // This function used to accept two arguments instead of an options object. + // Check the types to help users upgrade with less pain. + // This check can be removed after a 2.0 release. + if (callback && typeof callback !== 'function') { + throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); + } + if (callback) { + this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); + } + else { + return this.verifyIdTokenAsync(options); + } + } + async verifyIdTokenAsync(options) { + if (!options.idToken) { + throw new Error('The verifyIdToken method requires an ID Token'); + } + const response = await this.getFederatedSignonCertsAsync(); + const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); + return login; + } + /** + * Obtains information about the provisioned access token. Especially useful + * if you want to check the scopes that were provisioned to a given token. + * + * @param accessToken Required. The Access Token for which you want to get + * user info. 
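+     *
+     * @example
+     * A brief sketch (the `oauth2Client` and `accessToken` variables are assumed
+     * to already exist):
+     * ```
+     * const info = await oauth2Client.getTokenInfo(accessToken);
+     * // info.scopes      -> array of granted scopes
+     * // info.expiry_date -> expiration time in epoch milliseconds
+     * ```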
+ */ + async getTokenInfo(accessToken) { + const { data } = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + Authorization: `Bearer ${accessToken}`, + }, + url: this.endpoints.tokenInfoUrl.toString(), + }); + const info = Object.assign({ + expiry_date: new Date().getTime() + data.expires_in * 1000, + scopes: data.scope.split(' '), + }, data); + delete info.expires_in; + delete info.scope; + return info; + } + getFederatedSignonCerts(callback) { + if (callback) { + this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); + } + else { + return this.getFederatedSignonCertsAsync(); + } + } + async getFederatedSignonCertsAsync() { + const nowTime = new Date().getTime(); + const format = (0, crypto_1.hasBrowserCrypto)() + ? CertificateFormat.JWK + : CertificateFormat.PEM; + if (this.certificateExpiry && + nowTime < this.certificateExpiry.getTime() && + this.certificateCacheFormat === format) { + return { certs: this.certificateCache, format }; + } + let res; + let url; + switch (format) { + case CertificateFormat.PEM: + url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); + break; + case CertificateFormat.JWK: + url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + const cacheControl = res ? res.headers['cache-control'] : undefined; + let cacheAge = -1; + if (cacheControl) { + const pattern = new RegExp('max-age=([0-9]*)'); + const regexResult = pattern.exec(cacheControl); + if (regexResult && regexResult.length === 2) { + // Cache results with max-age (in seconds) + cacheAge = Number(regexResult[1]) * 1000; // milliseconds + } + } + let certificates = {}; + switch (format) { + case CertificateFormat.PEM: + certificates = res.data; + break; + case CertificateFormat.JWK: + for (const key of res.data.keys) { + certificates[key.kid] = key; + } + break; + default: + throw new Error(`Unsupported certificate format ${format}`); + } + const now = new Date(); + this.certificateExpiry = + cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); + this.certificateCache = certificates; + this.certificateCacheFormat = format; + return { certs: certificates, format, res }; + } + getIapPublicKeys(callback) { + if (callback) { + this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); + } + else { + return this.getIapPublicKeysAsync(); + } + } + async getIapPublicKeysAsync() { + let res; + const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); + try { + res = await this.transporter.request({ + ...OAuth2Client.RETRY_CONFIG, + url, + }); + } + catch (e) { + if (e instanceof Error) { + e.message = `Failed to retrieve verification certificates: ${e.message}`; + } + throw e; + } + return { pubkeys: res.data, res }; + } + verifySignedJwtWithCerts() { + // To make the code compatible with browser SubtleCrypto we need to make + // this method async. + throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); + } + /** + * Verify the id token is signed with the correct certificate + * and is from the correct audience. 
+ * @param jwt The jwt to verify (The ID Token in this case). + * @param certs The array of certs to test the jwt against. + * @param requiredAudience The audience to test the jwt against. + * @param issuers The allowed issuers of the jwt (Optional). + * @param maxExpiry The max expiry the certificate can be (Optional). + * @return Returns a promise resolving to LoginTicket on verification. + */ + async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { + const crypto = (0, crypto_1.createCrypto)(); + if (!maxExpiry) { + maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; + } + const segments = jwt.split('.'); + if (segments.length !== 3) { + throw new Error('Wrong number of segments in token: ' + jwt); + } + const signed = segments[0] + '.' + segments[1]; + let signature = segments[2]; + let envelope; + let payload; + try { + envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; + } + throw err; + } + if (!envelope) { + throw new Error("Can't parse token envelope: " + segments[0]); + } + try { + payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); + } + catch (err) { + if (err instanceof Error) { + err.message = `Can't parse token payload '${segments[0]}`; + } + throw err; + } + if (!payload) { + throw new Error("Can't parse token payload: " + segments[1]); + } + if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { + // If this is not present, then there's no reason to attempt verification + throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); + } + const cert = certs[envelope.kid]; + if (envelope.alg === 'ES256') { + signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); + } + const verified = await crypto.verify(cert, signed, signature); + if (!verified) { + throw new Error('Invalid token signature: ' + jwt); + } + if (!payload.iat) { + throw new Error('No issue time in token: ' + JSON.stringify(payload)); + } + if (!payload.exp) { + throw new Error('No expiration time in token: ' + JSON.stringify(payload)); + } + const iat = Number(payload.iat); + if (isNaN(iat)) + throw new Error('iat field using invalid format'); + const exp = Number(payload.exp); + if (isNaN(exp)) + throw new Error('exp field using invalid format'); + const now = new Date().getTime() / 1000; + if (exp >= now + maxExpiry) { + throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); + } + const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; + const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; + if (now < earliest) { + throw new Error('Token used too early, ' + + now + + ' < ' + + earliest + + ': ' + + JSON.stringify(payload)); + } + if (now > latest) { + throw new Error('Token used too late, ' + + now + + ' > ' + + latest + + ': ' + + JSON.stringify(payload)); + } + if (issuers && issuers.indexOf(payload.iss) < 0) { + throw new Error('Invalid issuer, expected one of [' + + issuers + + '], but got ' + + payload.iss); + } + // Check the audience matches if we have one + if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { + const aud = payload.aud; + let audVerified = false; + // If the requiredAudience is an array, check if it contains token + // audience + if (requiredAudience.constructor === Array) { + audVerified = requiredAudience.indexOf(aud) > -1; + } + else { + audVerified = aud === requiredAudience; + } + if 
(!audVerified) { + throw new Error('Wrong recipient, payload audience != requiredAudience'); + } + } + return new loginticket_1.LoginTicket(envelope, payload); + } + /** + * Returns a promise that resolves with AccessTokenResponse type if + * refreshHandler is defined. + * If not, nothing is returned. + */ + async processAndValidateRefreshHandler() { + if (this.refreshHandler) { + const accessTokenResponse = await this.refreshHandler(); + if (!accessTokenResponse.access_token) { + throw new Error('No access token is returned by the refreshHandler callback.'); + } + return accessTokenResponse; + } + return; + } + /** + * Returns true if a token is expired or will expire within + * eagerRefreshThresholdMillismilliseconds. + * If there is no expiry time, assumes the token is not expired or expiring. + */ + isTokenExpiring() { + const expiryDate = this.credentials.expiry_date; + return expiryDate + ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis + : false; + } +} +exports.OAuth2Client = OAuth2Client; +/** + * @deprecated use instance's {@link OAuth2Client.endpoints} + */ +OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; +/** + * Clock skew - five minutes in seconds + */ +OAuth2Client.CLOCK_SKEW_SECS_ = 300; +/** + * The default max Token Lifetime is one day in seconds + */ +OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; + + +/***/ }), + +/***/ 6653: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OAuthClientAuthHandler = void 0; +exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; +const querystring = __nccwpck_require__(3480); +const crypto_1 = __nccwpck_require__(8851); +/** List of HTTP methods that accept request bodies. */ +const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; +/** + * Abstract class for handling client authentication in OAuth-based + * operations. + * When request-body client authentication is used, only application/json and + * application/x-www-form-urlencoded content types for HTTP methods that support + * request bodies are supported. + */ +class OAuthClientAuthHandler { + /** + * Instantiates an OAuth client authentication handler. + * @param clientAuthentication The client auth credentials. + */ + constructor(clientAuthentication) { + this.clientAuthentication = clientAuthentication; + this.crypto = (0, crypto_1.createCrypto)(); + } + /** + * Applies client authentication on the OAuth request's headers or POST + * body but does not process the request. + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. 
+ */ + applyClientAuthenticationOptions(opts, bearerToken) { + // Inject authenticated header. + this.injectAuthenticatedHeaders(opts, bearerToken); + // Inject authenticated request body. + if (!bearerToken) { + this.injectAuthenticatedRequestBody(opts); + } + } + /** + * Applies client authentication on the request's header if either + * basic authentication or bearer token authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + * @param bearerToken The optional bearer token to use for authentication. + * When this is used, no client authentication credentials are needed. + */ + injectAuthenticatedHeaders(opts, bearerToken) { + var _a; + // Bearer token prioritized higher than basic Auth. + if (bearerToken) { + opts.headers = opts.headers || {}; + Object.assign(opts.headers, { + Authorization: `Bearer ${bearerToken}}`, + }); + } + else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') { + opts.headers = opts.headers || {}; + const clientId = this.clientAuthentication.clientId; + const clientSecret = this.clientAuthentication.clientSecret || ''; + const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`); + Object.assign(opts.headers, { + Authorization: `Basic ${base64EncodedCreds}`, + }); + } + } + /** + * Applies client authentication on the request's body if request-body + * client authentication is selected. + * + * @param opts The GaxiosOptions whose headers or data are to be modified + * depending on the client authentication mechanism to be used. + */ + injectAuthenticatedRequestBody(opts) { + var _a; + if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') { + const method = (opts.method || 'GET').toUpperCase(); + // Inject authenticated request body. + if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) { + // Get content-type. + let contentType; + const headers = opts.headers || {}; + for (const key in headers) { + if (key.toLowerCase() === 'content-type' && headers[key]) { + contentType = headers[key].toLowerCase(); + break; + } + } + if (contentType === 'application/x-www-form-urlencoded') { + opts.data = opts.data || ''; + const data = querystring.parse(opts.data); + Object.assign(data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + opts.data = querystring.stringify(data); + } + else if (contentType === 'application/json') { + opts.data = opts.data || {}; + Object.assign(opts.data, { + client_id: this.clientAuthentication.clientId, + client_secret: this.clientAuthentication.clientSecret || '', + }); + } + else { + throw new Error(`${contentType} content-types are not supported with ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + else { + throw new Error(`${method} HTTP method does not support ` + + `${this.clientAuthentication.confidentialClientType} ` + + 'client authentication'); + } + } + } + /** + * Retry config for Auth-related requests. + * + * @remarks + * + * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} + * config as some downstream APIs would prefer if customers explicitly enable retries, + * such as GCS. 
+ */ + static get RETRY_CONFIG() { + return { + retry: true, + retryConfig: { + httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], + }, + }; + } +} +exports.OAuthClientAuthHandler = OAuthClientAuthHandler; +/** + * Converts an OAuth error response to a native JavaScript Error. + * @param resp The OAuth error response to convert to a native Error object. + * @param err The optional original error. If provided, the error properties + * will be copied to the new error. + * @return The converted native Error object. + */ +function getErrorFromOAuthErrorResponse(resp, err) { + // Error response. + const errorCode = resp.error; + const errorDescription = resp.error_description; + const errorUri = resp.error_uri; + let message = `Error code ${errorCode}`; + if (typeof errorDescription !== 'undefined') { + message += `: ${errorDescription}`; + } + if (typeof errorUri !== 'undefined') { + message += ` - ${errorUri}`; + } + const newError = new Error(message); + // Copy properties from original error to newly generated error. + if (err) { + const keys = Object.keys(err); + if (err.stack) { + // Copy error.stack if available. + keys.push('stack'); + } + keys.forEach(key => { + // Do not overwrite the message field. + if (key !== 'message') { + Object.defineProperty(newError, key, { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: err[key], + writable: false, + enumerable: true, + }); + } + }); + } + return newError; +} + + +/***/ }), + +/***/ 2045: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PassThroughClient = void 0; +const authclient_1 = __nccwpck_require__(4810); +/** + * An AuthClient without any Authentication information. Useful for: + * - Anonymous access + * - Local Emulators + * - Testing Environments + * + */ +class PassThroughClient extends authclient_1.AuthClient { + /** + * Creates a request without any authentication headers or checks. + * + * @remarks + * + * In testing environments it may be useful to change the provided + * {@link AuthClient.transporter} for any desired request overrides/handling. + * + * @param opts + * @returns The response of the request. + */ + async request(opts) { + return this.transporter.request(opts); + } + /** + * A required method of the base class. + * Always will return an empty object. + * + * @returns {} + */ + async getAccessToken() { + return {}; + } + /** + * A required method of the base class. + * Always will return an empty object. 
+ * + * @returns {} + */ + async getRequestHeaders() { + return {}; + } +} +exports.PassThroughClient = PassThroughClient; +const a = new PassThroughClient(); +a.getAccessToken(); + + +/***/ }), + +/***/ 6077: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthClient = exports.ExecutableError = void 0; +const baseexternalclient_1 = __nccwpck_require__(142); +const executable_response_1 = __nccwpck_require__(3247); +const pluggable_auth_handler_1 = __nccwpck_require__(908); +/** + * Error thrown from the executable run by PluggableAuthClient. + */ +class ExecutableError extends Error { + constructor(message, code) { + super(`The executable failed with exit code: ${code} and error message: ${message}.`); + this.code = code; + Object.setPrototypeOf(this, new.target.prototype); + } +} +exports.ExecutableError = ExecutableError; +/** + * The default executable timeout when none is provided, in milliseconds. + */ +const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; +/** + * The minimum allowed executable timeout in milliseconds. + */ +const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; +/** + * The maximum allowed executable timeout in milliseconds. + */ +const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; +/** + * The environment variable to check to see if executable can be run. + * Value must be set to '1' for the executable to run. + */ +const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; +/** + * The maximum currently supported executable version. + */ +const MAXIMUM_EXECUTABLE_VERSION = 1; +/** + * PluggableAuthClient enables the exchange of workload identity pool external credentials for + * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These + * scripts/executables are completely independent of the Google Cloud Auth libraries. These + * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token + * to be exchanged for a Google access token. + * + *

+ * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
+ * must be set to '1'. This is for security reasons.
+ *
+ * Both OIDC and SAML are supported. The executable must adhere to a specific response format
+ * defined below.
+ *
+ * The executable must print out the 3rd party token to STDOUT in JSON format. When an
+ * output_file is specified in the credential configuration, the executable must also handle writing the
+ * JSON response to this file.
+ *
+ * OIDC response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
+ *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * SAML2 response sample:
+ * {
+ *   "version": 1,
+ *   "success": true,
+ *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
+ *   "saml_response": "...",
+ *   "expiration_time": 1620433341
+ * }
+ *
+ * Error response sample:
+ * {
+ *   "version": 1,
+ *   "success": false,
+ *   "code": "401",
+ *   "message": "Error message."
+ * }
+ *
+ * The "expiration_time" field in the JSON response is only required for successful
+ * responses when an output file was specified in the credential configuration
+ *
+ * The auth libraries will populate certain environment variables that will be accessible by the
+ * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
+ * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
+ * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
+ *
Please see this repositories README for a complete executable request/response specification. + */ +class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { + /** + * Instantiates a PluggableAuthClient instance using the provided JSON + * object loaded from an external account credentials file. + * An error is thrown if the credential is not a valid pluggable auth credential. + * @param options The external account options object typically loaded from + * the external account JSON credential file. + * @param additionalOptions **DEPRECATED, all options are available in the + * `options` parameter.** Optional additional behavior customization options. + * These currently customize expiration threshold time and whether to retry + * on 401/403 API request errors. + */ + constructor(options, additionalOptions) { + super(options, additionalOptions); + if (!options.credential_source.executable) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + this.command = options.credential_source.executable.command; + if (!this.command) { + throw new Error('No valid Pluggable Auth "credential_source" provided.'); + } + // Check if the provided timeout exists and if it is valid. + if (options.credential_source.executable.timeout_millis === undefined) { + this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; + } + else { + this.timeoutMillis = options.credential_source.executable.timeout_millis; + if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || + this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { + throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); + } + } + this.outputFile = options.credential_source.executable.output_file; + this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ + command: this.command, + timeoutMillis: this.timeoutMillis, + outputFile: this.outputFile, + }); + this.credentialSourceType = 'executable'; + } + /** + * Triggered when an external subject token is needed to be exchanged for a + * GCP access token via GCP STS endpoint. + * This uses the `options.credential_source` object to figure out how + * to retrieve the token using the current environment. In this case, + * this calls a user provided executable which returns the subject token. + * The logic is summarized as: + * 1. Validated that the executable is allowed to run. The + * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to + * 1 for security reasons. + * 2. If an output file is specified by the user, check the file location + * for a response. If the file exists and contains a valid response, + * return the subject token from the file. + * 3. Call the provided executable and return response. + * @return A promise that resolves with the external subject token. + */ + async retrieveSubjectToken() { + // Check if the executable is allowed to run. + if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { + throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + + 'Variable to 1.'); + } + let executableResponse = undefined; + // Try to get cached executable response from output file. + if (this.outputFile) { + executableResponse = await this.handler.retrieveCachedResponse(); + } + // If no response from output file, call the executable. 
+ if (!executableResponse) { + // Set up environment map with required values for the executable. + const envMap = new Map(); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); + // Always set to 0 because interactive mode is not supported. + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); + if (this.outputFile) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); + } + const serviceAccountEmail = this.getServiceAccountEmail(); + if (serviceAccountEmail) { + envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); + } + executableResponse = + await this.handler.retrieveResponseFromExecutable(envMap); + } + if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { + throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); + } + // Check that response was successful. + if (!executableResponse.success) { + throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); + } + // Check that response contains expiration time if output file was specified. + if (this.outputFile) { + if (!executableResponse.expirationTime) { + throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); + } + } + // Check that response is not expired. + if (executableResponse.isExpired()) { + throw new Error('Executable response is expired.'); + } + // Return subject token from response. + return executableResponse.subjectToken; + } +} +exports.PluggableAuthClient = PluggableAuthClient; + + +/***/ }), + +/***/ 908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PluggableAuthHandler = void 0; +const pluggable_auth_client_1 = __nccwpck_require__(6077); +const executable_response_1 = __nccwpck_require__(3247); +const childProcess = __nccwpck_require__(5317); +const fs = __nccwpck_require__(9896); +/** + * A handler used to retrieve 3rd party token responses from user defined + * executables and cached file output for the PluggableAuthClient class. + */ +class PluggableAuthHandler { + /** + * Instantiates a PluggableAuthHandler instance using the provided + * PluggableAuthHandlerOptions object. 
+ */ + constructor(options) { + if (!options.command) { + throw new Error('No command provided.'); + } + this.commandComponents = PluggableAuthHandler.parseCommand(options.command); + this.timeoutMillis = options.timeoutMillis; + if (!this.timeoutMillis) { + throw new Error('No timeoutMillis provided.'); + } + this.outputFile = options.outputFile; + } + /** + * Calls user provided executable to get a 3rd party subject token and + * returns the response. + * @param envMap a Map of additional Environment Variables required for + * the executable. + * @return A promise that resolves with the executable response. + */ + retrieveResponseFromExecutable(envMap) { + return new Promise((resolve, reject) => { + // Spawn process to run executable using added environment variables. + const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { + env: { ...process.env, ...Object.fromEntries(envMap) }, + }); + let output = ''; + // Append stdout to output as executable runs. + child.stdout.on('data', (data) => { + output += data; + }); + // Append stderr as executable runs. + child.stderr.on('data', (err) => { + output += err; + }); + // Set up a timeout to end the child process and throw an error. + const timeout = setTimeout(() => { + // Kill child process and remove listeners so 'close' event doesn't get + // read after child process is killed. + child.removeAllListeners(); + child.kill(); + return reject(new Error('The executable failed to finish within the timeout specified.')); + }, this.timeoutMillis); + child.on('close', (code) => { + // Cancel timeout if executable closes before timeout is reached. + clearTimeout(timeout); + if (code === 0) { + // If the executable completed successfully, try to return the parsed response. + try { + const responseJson = JSON.parse(output); + const response = new executable_response_1.ExecutableResponse(responseJson); + return resolve(response); + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + return reject(error); + } + return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); + } + } + else { + return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); + } + }); + }); + } + /** + * Checks user provided output file for response from previous run of + * executable and return the response if it exists, is formatted correctly, and is not expired. + */ + async retrieveCachedResponse() { + if (!this.outputFile || this.outputFile.length === 0) { + return undefined; + } + let filePath; + try { + filePath = await fs.promises.realpath(this.outputFile); + } + catch (_a) { + // If file path cannot be resolved, return undefined. + return undefined; + } + if (!(await fs.promises.lstat(filePath)).isFile()) { + // If path does not lead to file, return undefined. + return undefined; + } + const responseString = await fs.promises.readFile(filePath, { + encoding: 'utf8', + }); + if (responseString === '') { + return undefined; + } + try { + const responseJson = JSON.parse(responseString); + const response = new executable_response_1.ExecutableResponse(responseJson); + // Check if response is successful and unexpired. 
+ if (response.isValid()) { + return new executable_response_1.ExecutableResponse(responseJson); + } + return undefined; + } + catch (error) { + if (error instanceof executable_response_1.ExecutableResponseError) { + throw error; + } + throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); + } + } + /** + * Parses given command string into component array, splitting on spaces unless + * spaces are between quotation marks. + */ + static parseCommand(command) { + // Split the command into components by splitting on spaces, + // unless spaces are contained in quotation marks. + const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); + if (!components) { + throw new Error(`Provided command: "${command}" could not be parsed.`); + } + // Remove quotation marks from the beginning and end of each component if they are present. + for (let i = 0; i < components.length; i++) { + if (components[i][0] === '"' && components[i].slice(-1) === '"') { + components[i] = components[i].slice(1, -1); + } + } + return components; + } +} +exports.PluggableAuthHandler = PluggableAuthHandler; + + +/***/ }), + +/***/ 9807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; +const oauth2client_1 = __nccwpck_require__(91); +const querystring_1 = __nccwpck_require__(3480); +exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; +class UserRefreshClient extends oauth2client_1.OAuth2Client { + constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { + const opts = optionsOrClientId && typeof optionsOrClientId === 'object' + ? optionsOrClientId + : { + clientId: optionsOrClientId, + clientSecret, + refreshToken, + eagerRefreshThresholdMillis, + forceRefreshOnFailure, + }; + super(opts); + this._refreshToken = opts.refreshToken; + this.credentials.refresh_token = opts.refreshToken; + } + /** + * Refreshes the access token. + * @param refreshToken An ignored refreshToken.. + * @param callback Optional callback. 
+ */ + async refreshTokenNoCache( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + refreshToken) { + return super.refreshTokenNoCache(this._refreshToken); + } + async fetchIdToken(targetAudience) { + const res = await this.transporter.request({ + ...UserRefreshClient.RETRY_CONFIG, + url: this.endpoints.oauth2TokenUrl, + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + method: 'POST', + data: (0, querystring_1.stringify)({ + client_id: this._clientId, + client_secret: this._clientSecret, + grant_type: 'refresh_token', + refresh_token: this._refreshToken, + target_audience: targetAudience, + }), + }); + return res.data.id_token; + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + fromJSON(json) { + if (!json) { + throw new Error('Must pass in a JSON object containing the user refresh token'); + } + if (json.type !== 'authorized_user') { + throw new Error('The incoming JSON object does not have the "authorized_user" type'); + } + if (!json.client_id) { + throw new Error('The incoming JSON object does not contain a client_id field'); + } + if (!json.client_secret) { + throw new Error('The incoming JSON object does not contain a client_secret field'); + } + if (!json.refresh_token) { + throw new Error('The incoming JSON object does not contain a refresh_token field'); + } + this._clientId = json.client_id; + this._clientSecret = json.client_secret; + this._refreshToken = json.refresh_token; + this.credentials.refresh_token = json.refresh_token; + this.quotaProjectId = json.quota_project_id; + this.universeDomain = json.universe_domain || this.universeDomain; + } + fromStream(inputStream, callback) { + if (callback) { + this.fromStreamAsync(inputStream).then(() => callback(), callback); + } + else { + return this.fromStreamAsync(inputStream); + } + } + async fromStreamAsync(inputStream) { + return new Promise((resolve, reject) => { + if (!inputStream) { + return reject(new Error('Must pass in a stream containing the user refresh token.')); + } + let s = ''; + inputStream + .setEncoding('utf8') + .on('error', reject) + .on('data', chunk => (s += chunk)) + .on('end', () => { + try { + const data = JSON.parse(s); + this.fromJSON(data); + return resolve(); + } + catch (err) { + return reject(err); + } + }); + }); + } + /** + * Create a UserRefreshClient credentials instance using the given input + * options. + * @param json The input object. + */ + static fromJSON(json) { + const client = new UserRefreshClient(); + client.fromJSON(json); + return client; + } +} +exports.UserRefreshClient = UserRefreshClient; + + +/***/ }), + +/***/ 121: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
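The StsCredentials class in the module that follows assembles an RFC 8693 token-exchange request as a form-encoded POST. For orientation only, a minimal sketch of the request body it produces is shown here; it is not part of the bundled file, and the audience, scope, and token values are hypothetical placeholders.

// Sketch only: the shape of the form body built by StsCredentials.exchangeToken below.
// All values are placeholders, not real credentials.
const querystring = require('querystring');

const body = querystring.stringify({
  grant_type: 'urn:ietf:params:oauth:grant-type:token-exchange',
  audience: '//iam.googleapis.com/projects/000/locations/global/workloadIdentityPools/my-pool/providers/my-provider',
  requested_token_type: 'urn:ietf:params:oauth:token-type:access_token',
  subject_token: 'eyJhbGciOiJSUzI1NiJ9...', // external OIDC/SAML token (placeholder)
  subject_token_type: 'urn:ietf:params:oauth:token-type:jwt',
  scope: 'https://www.googleapis.com/auth/cloud-platform',
});

// Sent with Content-Type: application/x-www-form-urlencoded to the configured token
// exchange endpoint; the real implementation also drops undefined fields first.
console.log(body);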
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.StsCredentials = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const querystring = __nccwpck_require__(3480); +const transporters_1 = __nccwpck_require__(7633); +const oauth2common_1 = __nccwpck_require__(6653); +/** + * Implements the OAuth 2.0 token exchange based on + * https://tools.ietf.org/html/rfc8693 + */ +class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { + /** + * Initializes an STS credentials instance. + * @param tokenExchangeEndpoint The token exchange endpoint. + * @param clientAuthentication The client authentication credentials if + * available. + */ + constructor(tokenExchangeEndpoint, clientAuthentication) { + super(clientAuthentication); + this.tokenExchangeEndpoint = tokenExchangeEndpoint; + this.transporter = new transporters_1.DefaultTransporter(); + } + /** + * Exchanges the provided token for another type of token based on the + * rfc8693 spec. + * @param stsCredentialsOptions The token exchange options used to populate + * the token exchange request. + * @param additionalHeaders Optional additional headers to pass along the + * request. + * @param options Optional additional GCP-specific non-spec defined options + * to send with the request. + * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` + * @return A promise that resolves with the token exchange response containing + * the requested token and its expiration time. + */ + async exchangeToken(stsCredentialsOptions, additionalHeaders, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options) { + var _a, _b, _c; + const values = { + grant_type: stsCredentialsOptions.grantType, + resource: stsCredentialsOptions.resource, + audience: stsCredentialsOptions.audience, + scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), + requested_token_type: stsCredentialsOptions.requestedTokenType, + subject_token: stsCredentialsOptions.subjectToken, + subject_token_type: stsCredentialsOptions.subjectTokenType, + actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, + actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, + // Non-standard GCP-specific options. + options: options && JSON.stringify(options), + }; + // Remove undefined fields. + Object.keys(values).forEach(key => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof values[key] === 'undefined') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + delete values[key]; + } + }); + const headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + // Inject additional STS headers if available. + Object.assign(headers, additionalHeaders || {}); + const opts = { + ...StsCredentials.RETRY_CONFIG, + url: this.tokenExchangeEndpoint.toString(), + method: 'POST', + headers, + data: querystring.stringify(values), + responseType: 'json', + }; + // Apply OAuth client authentication. + this.applyClientAuthenticationOptions(opts); + try { + const response = await this.transporter.request(opts); + // Successful response. + const stsSuccessfulResponse = response.data; + stsSuccessfulResponse.res = response; + return stsSuccessfulResponse; + } + catch (error) { + // Translate error to OAuthError. 
+ if (error instanceof gaxios_1.GaxiosError && error.response) { + throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, + // Preserve other fields from the original error. + error); + } + // Request could fail before the server responds. + throw error; + } + } +} +exports.StsCredentials = StsCredentials; + + +/***/ }), + +/***/ 4627: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UrlSubjectTokenSupplier = void 0; +/** + * Internal subject token supplier implementation used when a URL + * is configured in the credential configuration used to build an {@link IdentityPoolClient} + */ +class UrlSubjectTokenSupplier { + /** + * Instantiates a URL subject token supplier. + * @param opts The URL subject token supplier options to build the supplier with. + */ + constructor(opts) { + this.url = opts.url; + this.formatType = opts.formatType; + this.subjectTokenFieldName = opts.subjectTokenFieldName; + this.headers = opts.headers; + this.additionalGaxiosOptions = opts.additionalGaxiosOptions; + } + /** + * Sends a GET request to the URL provided in the constructor and resolves + * with the returned external subject token. + * @param context {@link ExternalAccountSupplierContext} from the calling + * {@link IdentityPoolClient}, contains the requested audience and subject + * token type for the external account identity. Not used. + */ + async getSubjectToken(context) { + const opts = { + ...this.additionalGaxiosOptions, + url: this.url, + method: 'GET', + headers: this.headers, + responseType: this.formatType, + }; + let subjectToken; + if (this.formatType === 'text') { + const response = await context.transporter.request(opts); + subjectToken = response.data; + } + else if (this.formatType === 'json' && this.subjectTokenFieldName) { + const response = await context.transporter.request(opts); + subjectToken = response.data[this.subjectTokenFieldName]; + } + if (!subjectToken) { + throw new Error('Unable to parse the subject_token from the credential_source URL'); + } + return subjectToken; + } +} +exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; + + +/***/ }), + +/***/ 3438: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
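The UrlSubjectTokenSupplier above resolves the external subject token over HTTP, either as raw text or as a named field of a JSON response. A rough standalone sketch of that behaviour follows; the URL and field name are hypothetical, and the real client routes the request through its gaxios transporter rather than global fetch.

// Sketch only: mirrors UrlSubjectTokenSupplier.getSubjectToken, using global fetch (Node 18+).
async function getSubjectToken(url, subjectTokenFieldName) {
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Subject token request failed: ${response.status}`);
  }
  // 'json' format: read the configured field; 'text' format: use the body as-is.
  const subjectToken = subjectTokenFieldName
    ? (await response.json())[subjectTokenFieldName]
    : await response.text();
  if (!subjectToken) {
    throw new Error('Unable to parse the subject_token from the credential_source URL');
  }
  return subjectToken;
}

// Example (hypothetical endpoint):
// getSubjectToken('https://token.example.internal/oidc', 'id_token').then(console.log);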
+/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.BrowserCrypto = void 0; +// This file implements crypto functions we need using in-browser +// SubtleCrypto interface `window.crypto.subtle`. +const base64js = __nccwpck_require__(8793); +const crypto_1 = __nccwpck_require__(8851); +class BrowserCrypto { + constructor() { + if (typeof window === 'undefined' || + window.crypto === undefined || + window.crypto.subtle === undefined) { + throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); + } + } + async sha256DigestBase64(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. + const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return base64js.fromByteArray(new Uint8Array(outputBuffer)); + } + randomBytesBase64(count) { + const array = new Uint8Array(count); + window.crypto.getRandomValues(array); + return base64js.fromByteArray(array); + } + static padBase64(base64) { + // base64js requires padding, so let's add some '=' + while (base64.length % 4 !== 0) { + base64 += '='; + } + return base64; + } + async verify(pubkey, data, signature) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); + const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); + // SubtleCrypto's verify method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); + return result; + } + async sign(privateKey, data) { + const algo = { + name: 'RSASSA-PKCS1-v1_5', + hash: { name: 'SHA-256' }, + }; + const dataArray = new TextEncoder().encode(data); + const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); + // SubtleCrypto's sign method is async so we must make + // this method async as well. + const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); + return base64js.fromByteArray(new Uint8Array(result)); + } + decodeBase64StringUtf8(base64) { + const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); + const result = new TextDecoder().decode(uint8array); + return result; + } + encodeBase64StringUtf8(text) { + const uint8array = new TextEncoder().encode(text); + const result = base64js.fromByteArray(uint8array); + return result; + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + // SubtleCrypto digest() method is async, so we must make + // this method async as well. + // To calculate SHA256 digest using SubtleCrypto, we first + // need to convert an input string to an ArrayBuffer: + const inputBuffer = new TextEncoder().encode(str); + // Result is ArrayBuffer as well. 
+ const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); + return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + // Convert key, if provided in ArrayBuffer format, to string. + const rawKey = typeof key === 'string' + ? key + : String.fromCharCode(...new Uint16Array(key)); + const enc = new TextEncoder(); + const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { + name: 'HMAC', + hash: { + name: 'SHA-256', + }, + }, false, ['sign']); + return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); + } +} +exports.BrowserCrypto = BrowserCrypto; + + +/***/ }), + +/***/ 8851: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* global window */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCrypto = createCrypto; +exports.hasBrowserCrypto = hasBrowserCrypto; +exports.fromArrayBufferToHex = fromArrayBufferToHex; +const crypto_1 = __nccwpck_require__(3438); +const crypto_2 = __nccwpck_require__(7388); +function createCrypto() { + if (hasBrowserCrypto()) { + return new crypto_1.BrowserCrypto(); + } + return new crypto_2.NodeCrypto(); +} +function hasBrowserCrypto() { + return (typeof window !== 'undefined' && + typeof window.crypto !== 'undefined' && + typeof window.crypto.subtle !== 'undefined'); +} +/** + * Converts an ArrayBuffer to a hexadecimal string. + * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. + * @return The hexadecimal encoding of the ArrayBuffer. + */ +function fromArrayBufferToHex(arrayBuffer) { + // Convert buffer to byte array. + const byteArray = Array.from(new Uint8Array(arrayBuffer)); + // Convert bytes to hex string. + return byteArray + .map(byte => { + return byte.toString(16).padStart(2, '0'); + }) + .join(''); +} + + +/***/ }), + +/***/ 7388: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
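createCrypto() above returns BrowserCrypto when window.crypto.subtle is available and otherwise the NodeCrypto implementation that follows; both expose the same async digest/sign/verify surface. As a quick plain-Node sanity check of the sha256DigestHex behaviour (sketch only, using the standard SHA-256 test vector for 'abc'):

// Sketch only: equivalent of NodeCrypto.sha256DigestHex below, in plain Node.
const crypto = require('crypto');

function sha256Hex(str) {
  return crypto.createHash('sha256').update(str).digest('hex');
}

console.log(sha256Hex('abc'));
// ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad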
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.NodeCrypto = void 0; +const crypto = __nccwpck_require__(6982); +class NodeCrypto { + async sha256DigestBase64(str) { + return crypto.createHash('sha256').update(str).digest('base64'); + } + randomBytesBase64(count) { + return crypto.randomBytes(count).toString('base64'); + } + async verify(pubkey, data, signature) { + const verifier = crypto.createVerify('RSA-SHA256'); + verifier.update(data); + verifier.end(); + return verifier.verify(pubkey, signature, 'base64'); + } + async sign(privateKey, data) { + const signer = crypto.createSign('RSA-SHA256'); + signer.update(data); + signer.end(); + return signer.sign(privateKey, 'base64'); + } + decodeBase64StringUtf8(base64) { + return Buffer.from(base64, 'base64').toString('utf-8'); + } + encodeBase64StringUtf8(text) { + return Buffer.from(text, 'utf-8').toString('base64'); + } + /** + * Computes the SHA-256 hash of the provided string. + * @param str The plain text string to hash. + * @return A promise that resolves with the SHA-256 hash of the provided + * string in hexadecimal encoding. + */ + async sha256DigestHex(str) { + return crypto.createHash('sha256').update(str).digest('hex'); + } + /** + * Computes the HMAC hash of a message using the provided crypto key and the + * SHA-256 algorithm. + * @param key The secret crypto key in utf-8 or ArrayBuffer format. + * @param msg The plain text message. + * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer + * format. + */ + async signWithHmacSha256(key, msg) { + const cryptoKey = typeof key === 'string' ? key : toBuffer(key); + return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); + } +} +exports.NodeCrypto = NodeCrypto; +/** + * Converts a Node.js Buffer to an ArrayBuffer. + * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer + * @param buffer The Buffer input to covert. + * @return The ArrayBuffer representation of the input. + */ +function toArrayBuffer(buffer) { + return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); +} +/** + * Converts an ArrayBuffer to a Node.js Buffer. + * @param arrayBuffer The ArrayBuffer input to covert. + * @return The Buffer representation of the input. + */ +function toBuffer(arrayBuffer) { + return Buffer.from(arrayBuffer); +} + + +/***/ }), + +/***/ 492: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.ExecutableError = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsRequestSigner = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.ClientAuthentication = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +const googleauth_1 = __nccwpck_require__(5934); +Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); +// Export common deps to ensure types/instances are the exact match. Useful +// for consistently configuring the library across versions. +exports.gcpMetadata = __nccwpck_require__(3046); +exports.gaxios = __nccwpck_require__(7003); +var authclient_1 = __nccwpck_require__(4810); +Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); +Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); +var computeclient_1 = __nccwpck_require__(977); +Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); +var envDetect_1 = __nccwpck_require__(963); +Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); +var iam_1 = __nccwpck_require__(7009); +Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); +var idtokenclient_1 = __nccwpck_require__(2718); +Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); +var jwtaccess_1 = __nccwpck_require__(7060); +Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); +var jwtclient_1 = __nccwpck_require__(5277); +Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); +var impersonated_1 = __nccwpck_require__(9964); +Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); +var oauth2client_1 = __nccwpck_require__(91); +Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); +Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); +Object.defineProperty(exports, "ClientAuthentication", ({ enumerable: true, get: function () { return oauth2client_1.ClientAuthentication; } })); +var loginticket_1 = __nccwpck_require__(3882); +Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); +var refreshclient_1 = __nccwpck_require__(9807); +Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); +var awsclient_1 = __nccwpck_require__(1261); +Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); +var awsrequestsigner_1 = __nccwpck_require__(7647); +Object.defineProperty(exports, "AwsRequestSigner", ({ enumerable: true, get: function () { return awsrequestsigner_1.AwsRequestSigner; } })); +var identitypoolclient_1 = 
__nccwpck_require__(9960); +Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); +var externalclient_1 = __nccwpck_require__(8323); +Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); +var baseexternalclient_1 = __nccwpck_require__(142); +Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); +var downscopedclient_1 = __nccwpck_require__(7556); +Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return downscopedclient_1.DownscopedClient; } })); +var pluggable_auth_client_1 = __nccwpck_require__(6077); +Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); +Object.defineProperty(exports, "ExecutableError", ({ enumerable: true, get: function () { return pluggable_auth_client_1.ExecutableError; } })); +var passthrough_1 = __nccwpck_require__(2045); +Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); +var transporters_1 = __nccwpck_require__(7633); +Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); +const auth = new googleauth_1.GoogleAuth(); +exports.auth = auth; + + +/***/ }), + +/***/ 8290: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2017 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validate = validate; +// Accepts an options object passed from the user to the API. In the +// previous version of the API, it referred to a `Request` options object. +// Now it refers to an Axiox Request Config object. This is here to help +// ensure users don't pass invalid options when they upgrade from 0.x to 1.x. +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function validate(options) { + const vpairs = [ + { invalid: 'uri', expected: 'url' }, + { invalid: 'json', expected: 'data' }, + { invalid: 'qs', expected: 'params' }, + ]; + for (const pair of vpairs) { + if (options[pair.invalid]) { + const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; + throw new Error(e); + } + } +} + + +/***/ }), + +/***/ 7633: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
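// Illustrative sketch, not part of the bundled diff: how the validate() helper
// above rejects legacy `request`-style option names in favor of their Axios
// equivalents. The require path is an assumption; inside the bundle the module
// is only reachable via __nccwpck_require__(8290).
const { validate } = require('google-auth-library/build/src/options'); // assumed path
validate({ url: 'https://example.com', params: { q: 1 } }); // accepted silently
try {
  validate({ qs: { q: 1 } }); // legacy `request`-style key
} catch (e) {
  // "'qs' is not a valid configuration option. Please use 'params' instead. ..."
  console.error(e.message);
}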
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.DefaultTransporter = void 0; +const gaxios_1 = __nccwpck_require__(7003); +const options_1 = __nccwpck_require__(8290); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const pkg = __nccwpck_require__(6066); +const PRODUCT_NAME = 'google-api-nodejs-client'; +class DefaultTransporter { + constructor() { + /** + * A configurable, replacable `Gaxios` instance. + */ + this.instance = new gaxios_1.Gaxios(); + } + /** + * Configures request options before making a request. + * @param opts GaxiosOptions options. + * @return Configured options. + */ + configure(opts = {}) { + opts.headers = opts.headers || {}; + if (typeof window === 'undefined') { + // set transporter user agent if not in browser + const uaValue = opts.headers['User-Agent']; + if (!uaValue) { + opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; + } + else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { + opts.headers['User-Agent'] = + `${uaValue} ${DefaultTransporter.USER_AGENT}`; + } + // track google-auth-library-nodejs version: + if (!opts.headers['x-goog-api-client']) { + const nodeVersion = process.version.replace(/^v/, ''); + opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; + } + } + return opts; + } + /** + * Makes a request using Gaxios with given options. + * @param opts GaxiosOptions options. + * @param callback optional callback that contains GaxiosResponse object. + * @return GaxiosPromise, assuming no callback is passed. + */ + request(opts) { + // ensure the user isn't passing in request-style options + opts = this.configure(opts); + (0, options_1.validate)(opts); + return this.instance.request(opts).catch(e => { + throw this.processError(e); + }); + } + get defaults() { + return this.instance.defaults; + } + set defaults(opts) { + this.instance.defaults = opts; + } + /** + * Changes the error to include details from the body. + */ + processError(e) { + const res = e.response; + const err = e; + const body = res ? res.data : null; + if (res && body && body.error && res.status !== 200) { + if (typeof body.error === 'string') { + err.message = body.error; + err.status = res.status; + } + else if (Array.isArray(body.error.errors)) { + err.message = body.error.errors + .map((err2) => err2.message) + .join('\n'); + err.code = body.error.code; + err.errors = body.error.errors; + } + else { + err.message = body.error.message; + err.code = body.error.code; + } + } + else if (res && res.status >= 400) { + // Consider all 4xx and 5xx responses errors. + err.message = body; + err.status = res.status; + } + return err; + } +} +exports.DefaultTransporter = DefaultTransporter; +/** + * Default user agent. + */ +DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; + + +/***/ }), + +/***/ 7870: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
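// Illustrative sketch, not part of the bundled diff: what DefaultTransporter.configure()
// above adds to outgoing headers when running under Node (i.e. `window` is undefined).
// DefaultTransporter is re-exported by the google-auth-library index module above;
// the header values shown are examples of the pattern, not exact strings.
const { DefaultTransporter } = require('google-auth-library');
const transporter = new DefaultTransporter();
const opts = transporter.configure({ url: 'https://oauth2.googleapis.com/token' });
// opts.headers['User-Agent']        -> 'google-api-nodejs-client/<pkg.version>'
// opts.headers['x-goog-api-client'] -> 'gl-node/<process.version minus the leading "v">'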
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LRUCache = void 0; +exports.snakeToCamel = snakeToCamel; +exports.originalOrCamelOptions = originalOrCamelOptions; +/** + * Returns the camel case of a provided string. + * + * @remarks + * + * Match any `_` and not `_` pair, then return the uppercase of the not `_` + * character. + * + * @internal + * + * @param str the string to convert + * @returns the camelCase'd string + */ +function snakeToCamel(str) { + return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); +} +/** + * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference + * for original, non-camelCase key. + * + * @param obj object to lookup a value in + * @returns a `get` function for getting `obj[key || snakeKey]`, if available + */ +function originalOrCamelOptions(obj) { + /** + * + * @param key an index of object, preferably snake_case + * @returns the value `obj[key || snakeKey]`, if available + */ + function get(key) { + var _a; + const o = (obj || {}); + return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; + } + return { get }; +} +/** + * A simple LRU cache utility. + * Not meant for external usage. + * + * @experimental + * @internal + */ +class LRUCache { + constructor(options) { + _LRUCache_instances.add(this); + /** + * Maps are in order. Thus, the older item is the first item. + * + * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} + */ + _LRUCache_cache.set(this, new Map()); + this.capacity = options.capacity; + this.maxAge = options.maxAge; + } + /** + * Add an item to the cache. + * + * @param key the key to upsert + * @param value the value of the key + */ + set(key, value) { + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + } + /** + * Get an item from the cache. 
+ * + * @param key the key to retrieve + */ + get(key) { + const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); + if (!item) + return; + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); + __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); + return item.value; + } +} +exports.LRUCache = LRUCache; +_LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); + __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { + value, + lastAccessed: Date.now(), + }); +}, _LRUCache_evict = function _LRUCache_evict() { + const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; + /** + * Because we know Maps are in order, this item is both the + * last item in the list (capacity) and oldest (maxAge). + */ + let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + while (!oldestItem.done && + (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many + oldestItem.value[1].lastAccessed < cutoffDate) // too old + ) { + __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); + oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); + } +}; + + +/***/ }), + +/***/ 1628: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Colours = void 0; +/** + * Handles figuring out if we can use ANSI colours and handing out the escape codes. + * + * This is for package-internal use only, and may change at any time. + * + * @private + * @internal + */ +class Colours { + /** + * @param stream The stream (e.g. process.stderr) + * @returns true if the stream should have colourization enabled + */ + static isEnabled(stream) { + return (stream.isTTY && + (typeof stream.getColorDepth === 'function' + ? 
stream.getColorDepth() > 2 + : true)); + } + static refresh() { + Colours.enabled = Colours.isEnabled(process.stderr); + if (!this.enabled) { + Colours.reset = ''; + Colours.bright = ''; + Colours.dim = ''; + Colours.red = ''; + Colours.green = ''; + Colours.yellow = ''; + Colours.blue = ''; + Colours.magenta = ''; + Colours.cyan = ''; + Colours.white = ''; + Colours.grey = ''; + } + else { + Colours.reset = '\u001b[0m'; + Colours.bright = '\u001b[1m'; + Colours.dim = '\u001b[2m'; + Colours.red = '\u001b[31m'; + Colours.green = '\u001b[32m'; + Colours.yellow = '\u001b[33m'; + Colours.blue = '\u001b[34m'; + Colours.magenta = '\u001b[35m'; + Colours.cyan = '\u001b[36m'; + Colours.white = '\u001b[37m'; + Colours.grey = '\u001b[90m'; + } + } +} +exports.Colours = Colours; +Colours.enabled = false; +Colours.reset = ''; +Colours.bright = ''; +Colours.dim = ''; +Colours.red = ''; +Colours.green = ''; +Colours.yellow = ''; +Colours.blue = ''; +Colours.magenta = ''; +Colours.cyan = ''; +Colours.white = ''; +Colours.grey = ''; +Colours.refresh(); +//# sourceMappingURL=colours.js.map + +/***/ }), + +/***/ 1577: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(4788), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4788: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2021-2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.env = exports.DebugLogBackendBase = exports.placeholder = exports.AdhocDebugLogger = exports.LogSeverity = void 0; +exports.getNodeBackend = getNodeBackend; +exports.getDebugBackend = getDebugBackend; +exports.getStructuredBackend = getStructuredBackend; +exports.setBackend = setBackend; +exports.log = log; +const node_events_1 = __nccwpck_require__(8474); +const process = __importStar(__nccwpck_require__(1708)); +const util = __importStar(__nccwpck_require__(7975)); +const colours_1 = __nccwpck_require__(1628); +// Some functions (as noted) are based on the Node standard library, from +// the following file: +// +// https://github.com/nodejs/node/blob/main/lib/internal/util/debuglog.js +/** + * This module defines an ad-hoc debug logger for Google Cloud Platform + * client libraries in Node. An ad-hoc debug logger is a tool which lets + * users use an external, unified interface (in this case, environment + * variables) to determine what logging they want to see at runtime. This + * isn't necessarily fed into the console, but is meant to be under the + * control of the user. The kind of logging that will be produced by this + * is more like "call retry happened", not "event you'd want to record + * in Cloud Logger". + * + * More for Googlers implementing libraries with it: + * go/cloud-client-logging-design + */ +/** + * Possible log levels. These are a subset of Cloud Observability levels. + * https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity + */ +var LogSeverity; +(function (LogSeverity) { + LogSeverity["DEFAULT"] = "DEFAULT"; + LogSeverity["DEBUG"] = "DEBUG"; + LogSeverity["INFO"] = "INFO"; + LogSeverity["WARNING"] = "WARNING"; + LogSeverity["ERROR"] = "ERROR"; +})(LogSeverity || (exports.LogSeverity = LogSeverity = {})); +/** + * Our logger instance. This actually contains the meat of dealing + * with log lines, including EventEmitter. This contains the function + * that will be passed back to users of the package. + */ +class AdhocDebugLogger extends node_events_1.EventEmitter { + /** + * @param upstream The backend will pass a function that will be + * called whenever our logger function is invoked. + */ + constructor(namespace, upstream) { + super(); + this.namespace = namespace; + this.upstream = upstream; + this.func = Object.assign(this.invoke.bind(this), { + // Also add an instance pointer back to us. + instance: this, + // And pull over the EventEmitter functionality. 
+ on: (event, listener) => this.on(event, listener), + }); + // Convenience methods for log levels. + this.func.debug = (...args) => this.invokeSeverity(LogSeverity.DEBUG, ...args); + this.func.info = (...args) => this.invokeSeverity(LogSeverity.INFO, ...args); + this.func.warn = (...args) => this.invokeSeverity(LogSeverity.WARNING, ...args); + this.func.error = (...args) => this.invokeSeverity(LogSeverity.ERROR, ...args); + this.func.sublog = (namespace) => log(namespace, this.func); + } + invoke(fields, ...args) { + // Push out any upstream logger first. + if (this.upstream) { + this.upstream(fields, ...args); + } + // Emit sink events. + this.emit('log', fields, args); + } + invokeSeverity(severity, ...args) { + this.invoke({ severity }, ...args); + } +} +exports.AdhocDebugLogger = AdhocDebugLogger; +/** + * This can be used in place of a real logger while waiting for Promises or disabling logging. + */ +exports.placeholder = new AdhocDebugLogger('', () => { }).func; +/** + * The base class for debug logging backends. It's possible to use this, but the + * same non-guarantees above still apply (unstable interface, etc). + * + * @private + * @internal + */ +class DebugLogBackendBase { + constructor() { + var _a; + this.cached = new Map(); + this.filters = []; + this.filtersSet = false; + // Look for the Node config variable for what systems to enable. We'll store + // these for the log method below, which will call setFilters() once. + let nodeFlag = (_a = process.env[exports.env.nodeEnables]) !== null && _a !== void 0 ? _a : '*'; + if (nodeFlag === 'all') { + nodeFlag = '*'; + } + this.filters = nodeFlag.split(','); + } + log(namespace, fields, ...args) { + try { + if (!this.filtersSet) { + this.setFilters(); + this.filtersSet = true; + } + let logger = this.cached.get(namespace); + if (!logger) { + logger = this.makeLogger(namespace); + this.cached.set(namespace, logger); + } + logger(fields, ...args); + } + catch (e) { + // Silently ignore all errors; we don't want them to interfere with + // the user's running app. + // e; + console.error(e); + } + } +} +exports.DebugLogBackendBase = DebugLogBackendBase; +// The basic backend. This one definitely works, but it's less feature-filled. +// +// Rather than using util.debuglog, this implements the same basic logic directly. +// The reason for this decision is that debuglog checks the value of the +// NODE_DEBUG environment variable before any user code runs; we therefore +// can't pipe our own enables into it (and util.debuglog will never print unless +// the user duplicates it into NODE_DEBUG, which isn't reasonable). +// +class NodeBackend extends DebugLogBackendBase { + constructor() { + super(...arguments); + // Default to allowing all systems, since we gate earlier based on whether the + // variable is empty. + this.enabledRegexp = /.*/g; + } + isEnabled(namespace) { + return this.enabledRegexp.test(namespace); + } + makeLogger(namespace) { + if (!this.enabledRegexp.test(namespace)) { + return () => { }; + } + return (fields, ...args) => { + var _a; + // TODO: `fields` needs to be turned into a string here, one way or another. 
+ const nscolour = `${colours_1.Colours.green}${namespace}${colours_1.Colours.reset}`; + const pid = `${colours_1.Colours.yellow}${process.pid}${colours_1.Colours.reset}`; + let level; + switch (fields.severity) { + case LogSeverity.ERROR: + level = `${colours_1.Colours.red}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.INFO: + level = `${colours_1.Colours.magenta}${fields.severity}${colours_1.Colours.reset}`; + break; + case LogSeverity.WARNING: + level = `${colours_1.Colours.yellow}${fields.severity}${colours_1.Colours.reset}`; + break; + default: + level = (_a = fields.severity) !== null && _a !== void 0 ? _a : LogSeverity.DEFAULT; + break; + } + const msg = util.formatWithOptions({ colors: colours_1.Colours.enabled }, ...args); + const filteredFields = Object.assign({}, fields); + delete filteredFields.severity; + const fieldsJson = Object.getOwnPropertyNames(filteredFields).length + ? JSON.stringify(filteredFields) + : ''; + const fieldsColour = fieldsJson + ? `${colours_1.Colours.grey}${fieldsJson}${colours_1.Colours.reset}` + : ''; + console.error('%s [%s|%s] %s%s', pid, nscolour, level, msg, fieldsJson ? ` ${fieldsColour}` : ''); + }; + } + // Regexp patterns below are from here: + // https://github.com/nodejs/node/blob/c0aebed4b3395bd65d54b18d1fd00f071002ac20/lib/internal/util/debuglog.js#L36 + setFilters() { + const totalFilters = this.filters.join(','); + const regexp = totalFilters + .replace(/[|\\{}()[\]^$+?.]/g, '\\$&') + .replace(/\*/g, '.*') + .replace(/,/g, '$|^'); + this.enabledRegexp = new RegExp(`^${regexp}$`, 'i'); + } +} +/** + * @returns A backend based on Node util.debuglog; this is the default. + */ +function getNodeBackend() { + return new NodeBackend(); +} +class DebugBackend extends DebugLogBackendBase { + constructor(pkg) { + super(); + this.debugPkg = pkg; + } + makeLogger(namespace) { + const debugLogger = this.debugPkg(namespace); + return (fields, ...args) => { + // TODO: `fields` needs to be turned into a string here. + debugLogger(args[0], ...args.slice(1)); + }; + } + setFilters() { + var _a; + const existingFilters = (_a = process.env['NODE_DEBUG']) !== null && _a !== void 0 ? _a : ''; + process.env['NODE_DEBUG'] = `${existingFilters}${existingFilters ? ',' : ''}${this.filters.join(',')}`; + } +} +/** + * Creates a "debug" package backend. The user must call require('debug') and pass + * the resulting object to this function. + * + * ``` + * setBackend(getDebugBackend(require('debug'))) + * ``` + * + * https://www.npmjs.com/package/debug + * + * Note: Google does not explicitly endorse or recommend this package; it's just + * being provided as an option. + * + * @returns A backend based on the npm "debug" package. + */ +function getDebugBackend(debugPkg) { + return new DebugBackend(debugPkg); +} +/** + * This pretty much works like the Node logger, but it outputs structured + * logging JSON matching Google Cloud's ingestion specs. Rather than handling + * its own output, it wraps another backend. The passed backend must be a subclass + * of `DebugLogBackendBase` (any of the backends exposed by this package will work). + */ +class StructuredBackend extends DebugLogBackendBase { + constructor(upstream) { + var _a; + super(); + this.upstream = (_a = upstream) !== null && _a !== void 0 ? _a : new NodeBackend(); + } + makeLogger(namespace) { + const debugLogger = this.upstream.makeLogger(namespace); + return (fields, ...args) => { + var _a; + const severity = (_a = fields.severity) !== null && _a !== void 0 ? 
_a : LogSeverity.INFO; + const json = Object.assign({ + severity, + message: util.format(...args), + }, fields); + const jsonString = JSON.stringify(json); + debugLogger(fields, jsonString); + }; + } + setFilters() { + this.upstream.setFilters(); + } +} +/** + * Creates a "structured logging" backend. This pretty much works like the + * Node logger, but it outputs structured logging JSON matching Google + * Cloud's ingestion specs instead of plain text. + * + * ``` + * setBackend(getStructuredBackend()) + * ``` + * + * @param upstream If you want to use something besides the Node backend to + * write the actual log lines into, pass that here. + * @returns A backend based on Google Cloud structured logging. + */ +function getStructuredBackend(upstream) { + return new StructuredBackend(upstream); +} +/** + * The environment variables that we standardized on, for all ad-hoc logging. + */ +exports.env = { + /** + * Filter wildcards specific to the Node syntax, and similar to the built-in + * utils.debuglog() environment variable. If missing, disables logging. + */ + nodeEnables: 'GOOGLE_SDK_NODE_LOGGING', +}; +// Keep a copy of all namespaced loggers so users can reliably .on() them. +// Note that these cached functions will need to deal with changes in the backend. +const loggerCache = new Map(); +// Our current global backend. This might be: +let cachedBackend = undefined; +/** + * Set the backend to use for our log output. + * - A backend object + * - null to disable logging + * - undefined for "nothing yet", defaults to the Node backend + * + * @param backend Results from one of the get*Backend() functions. + */ +function setBackend(backend) { + cachedBackend = backend; + loggerCache.clear(); +} +/** + * Creates a logging function. Multiple calls to this with the same namespace + * will produce the same logger, with the same event emitter hooks. + * + * Namespaces can be a simple string ("system" name), or a qualified string + * (system:subsystem), which can be used for filtering, or for "system:*". + * + * @param namespace The namespace, a descriptive text string. + * @returns A function you can call that works similar to console.log(). + */ +function log(namespace, parent) { + // If the enable flag isn't set, do nothing. + const enablesFlag = process.env[exports.env.nodeEnables]; + if (!enablesFlag) { + return exports.placeholder; + } + // This might happen mostly if the typings are dropped in a user's code, + // or if they're calling from JavaScript. + if (!namespace) { + return exports.placeholder; + } + // Handle sub-loggers. + if (parent) { + namespace = `${parent.instance.namespace}:${namespace}`; + } + // Reuse loggers so things like event sinks are persistent. + const existing = loggerCache.get(namespace); + if (existing) { + return existing.func; + } + // Do we have a backend yet? + if (cachedBackend === null) { + // Explicitly disabled. + return exports.placeholder; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. + cachedBackend = getNodeBackend(); + } + // The logger is further wrapped so we can handle the backend changing out. + const logger = (() => { + let previousBackend = undefined; + const newLogger = new AdhocDebugLogger(namespace, (fields, ...args) => { + if (previousBackend !== cachedBackend) { + // Did the user pass a custom backend? + if (cachedBackend === null) { + // Explicitly disabled. + return; + } + else if (cachedBackend === undefined) { + // One hasn't been made yet, so default to Node. 
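// Illustrative sketch, not part of the bundled diff: driving the ad-hoc logger
// defined in this module through the GOOGLE_SDK_NODE_LOGGING environment
// variable declared above. The package name in the require is an assumption;
// log(), setBackend() and getNodeBackend() are the functions defined above.
process.env.GOOGLE_SDK_NODE_LOGGING = 'auth';            // enable the "auth" namespace ('*' or 'all' enables everything)
const { log, setBackend, getNodeBackend } = require('google-logging-utils'); // assumed package name
setBackend(getNodeBackend());                            // optional: the Node backend is also the default
const authLog = log('auth');
authLog.info('token refresh scheduled in %d ms', 3000);  // printed to stderr with pid, namespace and severity
authLog.on('log', (fields, args) => { /* event sink, e.g. for tests */ });
log('metadata').debug('not printed: namespace not enabled');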
+ cachedBackend = getNodeBackend(); + } + previousBackend = cachedBackend; + } + cachedBackend === null || cachedBackend === void 0 ? void 0 : cachedBackend.log(namespace, fields, ...args); + }); + return newLogger; + })(); + loggerCache.set(namespace, logger); + return logger.func; +} +//# sourceMappingURL=logging-utils.js.map + +/***/ }), + +/***/ 8568: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/** + * Copyright 2018 Google LLC + * + * Distributed under MIT license. + * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT + */ +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GoogleToken = void 0; +const fs = __nccwpck_require__(9896); +const gaxios_1 = __nccwpck_require__(7003); +const jws = __nccwpck_require__(3324); +const path = __nccwpck_require__(6928); +const util_1 = __nccwpck_require__(9023); +const readFile = fs.readFile + ? (0, util_1.promisify)(fs.readFile) + : async () => { + // if running in the web-browser, fs.readFile may not have been shimmed. + throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); + }; +const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; +const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; +class ErrorWithCode extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} +class GoogleToken { + get accessToken() { + return this.rawToken ? this.rawToken.access_token : undefined; + } + get idToken() { + return this.rawToken ? this.rawToken.id_token : undefined; + } + get tokenType() { + return this.rawToken ? this.rawToken.token_type : undefined; + } + get refreshToken() { + return this.rawToken ? this.rawToken.refresh_token : undefined; + } + /** + * Create a GoogleToken. + * + * @param options Configuration object. + */ + constructor(options) { + _GoogleToken_instances.add(this); + this.transporter = { + request: opts => (0, gaxios_1.request)(opts), + }; + _GoogleToken_inFlightRequest.set(this, void 0); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); + } + /** + * Returns whether the token has expired. 
+ * + * @return true if the token has expired, false otherwise. + */ + hasExpired() { + const now = new Date().getTime(); + if (this.rawToken && this.expiresAt) { + return now >= this.expiresAt; + } + else { + return true; + } + } + /** + * Returns whether the token will expire within eagerRefreshThresholdMillis + * + * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. + */ + isTokenExpiring() { + var _a; + const now = new Date().getTime(); + const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; + if (this.rawToken && this.expiresAt) { + return this.expiresAt <= now + eagerRefreshThresholdMillis; + } + else { + return true; + } + } + getToken(callback, opts = {}) { + if (typeof callback === 'object') { + opts = callback; + callback = undefined; + } + opts = Object.assign({ + forceRefresh: false, + }, opts); + if (callback) { + const cb = callback; + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); + } + /** + * Given a keyFile, extract the key and client email if available + * @param keyFile Path to a json, pem, or p12 file that contains the key. + * @returns an object with privateKey and clientEmail properties + */ + async getCredentials(keyFile) { + const ext = path.extname(keyFile); + switch (ext) { + case '.json': { + const key = await readFile(keyFile, 'utf8'); + const body = JSON.parse(key); + const privateKey = body.private_key; + const clientEmail = body.client_email; + if (!privateKey || !clientEmail) { + throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); + } + return { privateKey, clientEmail }; + } + case '.der': + case '.crt': + case '.pem': { + const privateKey = await readFile(keyFile, 'utf8'); + return { privateKey }; + } + case '.p12': + case '.pfx': { + throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + default: + throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); + } + } + revokeToken(callback) { + if (callback) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); + return; + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); + } +} +exports.GoogleToken = GoogleToken; +_GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { + if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { + return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); + } + try { + return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); + } + finally { + __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); + } +}, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { + if (this.isTokenExpiring() === false && opts.forceRefresh === false) { + return Promise.resolve(this.rawToken); + } + if (!this.key && !this.keyFile) { + throw new Error('No key or keyFile set.'); + } + if (!this.key && this.keyFile) { + const creds = await this.getCredentials(this.keyFile); + this.key = creds.privateKey; + this.iss = creds.clientEmail || this.iss; + if (!creds.clientEmail) { + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); + } + } + return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); +}, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { + if (!this.iss) { + throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); + } +}, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { + if (!this.accessToken) { + throw new Error('No token to revoke.'); + } + const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; + await this.transporter.request({ + url, + retry: true, + }); + __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { + email: this.iss, + sub: this.sub, + key: this.key, + keyFile: this.keyFile, + scope: this.scope, + additionalClaims: this.additionalClaims, + }); +}, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { + this.keyFile = options.keyFile; + this.key = options.key; + this.rawToken = undefined; + this.iss = options.email || options.iss; + this.sub = options.sub; + this.additionalClaims = options.additionalClaims; + if (typeof options.scope === 'object') { + this.scope = options.scope.join(' '); + } + else { + this.scope = options.scope; + } + this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; + if (options.transporter) { + this.transporter = options.transporter; + } +}, _GoogleToken_requestToken = +/** + * Request the token from Google. 
+ */ +async function _GoogleToken_requestToken() { + var _a, _b; + const iat = Math.floor(new Date().getTime() / 1000); + const additionalClaims = this.additionalClaims || {}; + const payload = Object.assign({ + iss: this.iss, + scope: this.scope, + aud: GOOGLE_TOKEN_URL, + exp: iat + 3600, + iat, + sub: this.sub, + }, additionalClaims); + const signedJWT = jws.sign({ + header: { alg: 'RS256' }, + payload, + secret: this.key, + }); + try { + const r = await this.transporter.request({ + method: 'POST', + url: GOOGLE_TOKEN_URL, + data: { + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: signedJWT, + }, + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + responseType: 'json', + retryConfig: { + httpMethodsToRetry: ['POST'], + }, + }); + this.rawToken = r.data; + this.expiresAt = + r.data.expires_in === null || r.data.expires_in === undefined + ? undefined + : (iat + r.data.expires_in) * 1000; + return this.rawToken; + } + catch (e) { + this.rawToken = undefined; + this.tokenExpires = undefined; + const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) + ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data + : {}; + if (body.error) { + const desc = body.error_description + ? `: ${body.error_description}` + : ''; + e.message = `${body.error}${desc}`; + } + throw e; + } +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 3813: +/***/ ((module) => { + +"use strict"; + + +module.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +}; + + +/***/ }), + +/***/ 7847: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const once_1 = __importDefault(__nccwpck_require__(8662)); +const agent_base_1 = __nccwpck_require__(2960); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
+ * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. 
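// Illustrative sketch, not part of the bundled diff: typical use of the
// HttpProxyAgent above for plain HTTP requests. The factory wrapper is the
// createHttpProxyAgent module further below; the proxy URL is a placeholder.
const http = require('http');
const createHttpProxyAgent = require('http-proxy-agent'); // resolves to the factory module below inside this bundle
const agent = createHttpProxyAgent('http://127.0.0.1:3128');
http.get({ host: 'example.com', path: '/', agent }, res => {
  console.log('proxied response status:', res.statusCode); // request path rewritten to an absolute URL by callback()
});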
+ if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports["default"] = HttpProxyAgent; +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 1970: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(7847)); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2960: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(2600)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
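// Illustrative sketch, not part of the bundled diff: the contract agent-base
// expects from the `callback` used above - given (req, opts), return a Duplex
// stream (or a Promise for one, or another Agent). Host and port values are
// placeholders.
const net = require('net');
const createAgent = require('agent-base'); // the createAgent module above
const agent = createAgent((req, opts) => net.connect({ host: opts.host, port: opts.port }));
require('http').get({ host: 'example.com', path: '/', agent }, res => {
  console.log('status via custom agent:', res.statusCode);
});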
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 2600: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 3669: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpsProxyAgent = void 0; +const net = __importStar(__nccwpck_require__(9278)); +const tls = __importStar(__nccwpck_require__(4756)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(8894); +const url_1 = __nccwpck_require__(7016); +const parse_proxy_response_1 = __nccwpck_require__(7943); +const debug = (0, debug_1.default)('https-proxy-agent'); +const setServernameFromNonIpHost = (options) => { + if (options.servername === undefined && + options.host && + !net.isIP(options.host)) { + return { + ...options, + servername: options.host, + }; + } + return options; +}; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? 
`[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + return tls.connect({ + ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'), + socket, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('Replaying proxy buffer for failed request'); + (0, assert_1.default)(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + } +} +HttpsProxyAgent.protocols = ['http', 'https']; +exports.HttpsProxyAgent = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7943: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseProxyResponse = void 0; +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
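+// A minimal usage sketch for the HttpsProxyAgent class above, assuming the published
+// `https-proxy-agent` package and a hypothetical proxy URL; the agent first issues an HTTP
+// CONNECT to the proxy and, for `https:` targets, upgrades the tunneled socket to TLS:
+//
+//   const https = require('https');
+//   const { HttpsProxyAgent } = require('https-proxy-agent');
+//
+//   const agent = new HttpsProxyAgent('http://proxy.example.com:3128');
+//   https.get('https://example.com/', { agent }, (res) => {
+//     console.log('status:', res.statusCode);
+//   });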
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 9598: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(9023); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(6589); +} + + +/***/ }), + +/***/ 6589: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), + +/***/ 6543: +/***/ ((module) => { + +"use strict"; + + +const isStream = stream => + stream !== null && + typeof stream === 'object' && + typeof stream.pipe === 'function'; + +isStream.writable = stream => + isStream(stream) && + stream.writable !== false && + 
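+// The parseProxyResponse helper above resolves once it has buffered the proxy's CONNECT
+// reply up to the "\r\n\r\n" terminator; a sketch of the shape it hands back to
+// HttpsProxyAgent.connect() (values are illustrative):
+//
+//   const { connect, buffered } = await parseProxyResponse(socket);
+//   // connect.statusCode -> e.g. 200 when the tunnel was accepted
+//   // connect.statusText -> e.g. 'Connection established'
+//   // connect.headers    -> lower-cased header map (string or string[] per key)
+//   // buffered           -> raw Buffer read so far, replayed on non-200 responses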
typeof stream._write === 'function' && + typeof stream._writableState === 'object'; + +isStream.readable = stream => + isStream(stream) && + stream.readable !== false && + typeof stream._read === 'function' && + typeof stream._readableState === 'object'; + +isStream.duplex = stream => + isStream.writable(stream) && + isStream.readable(stream); + +isStream.transform = stream => + isStream.duplex(stream) && + typeof stream._transform === 'function'; + +module.exports = isStream; + + +/***/ }), + +/***/ 4826: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var json_stringify = (__nccwpck_require__(3651).stringify); +var json_parse = __nccwpck_require__(3197); + +module.exports = function(options) { + return { + parse: json_parse(options), + stringify: json_stringify + } +}; +//create the default method members with no options applied for backwards compatibility +module.exports.parse = json_parse(); +module.exports.stringify = json_stringify; + + +/***/ }), + +/***/ 3197: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = null; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors + +const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +/* + json_parse.js + 2012-06-20 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + This file creates a json_parse function. + During create you can (optionally) specify some behavioural switches + + require('json-bigint')(options) + + The optional options parameter holds switches that drive certain + aspects of the parsing process: + * options.strict = true will warn about duplicate-key usage in the json. + The default (strict = false) will silently ignore those and overwrite + values for keys that are in duplicate use. + + The resulting function follows this signature: + json_parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = json_parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + This is a reference implementation. You are free to copy, modify, or + redistribute. + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. 
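+
+ An illustrative example of passing options (here storeAsString) so that integers
+ longer than 15 digits are kept as strings instead of losing precision:
+
+ var JSONbig = require('json-bigint')({ storeAsString: true });
+ var obj = JSONbig.parse('{"id": 9223372036854775807}');
+ // obj.id === '9223372036854775807'
+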
+*/ + +/*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, + hasOwnProperty, message, n, name, prototype, push, r, t, text +*/ + +var json_parse = function (options) { + 'use strict'; + + // This is a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + // We are defining the function inside of another function to avoid creating + // global variables. + + // Default options one can override by passing options to the parse() + var _options = { + strict: false, // not being strict means do not generate syntax errors for "duplicate key" + storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string + alwaysParseAsBig: false, // toggles whether all numbers should be Big + useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js + protoAction: 'error', + constructorAction: 'error', + }; + + // If there are options, then use them to override the default _options + if (options !== undefined && options !== null) { + if (options.strict === true) { + _options.strict = true; + } + if (options.storeAsString === true) { + _options.storeAsString = true; + } + _options.alwaysParseAsBig = + options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; + _options.useNativeBigInt = + options.useNativeBigInt === true ? options.useNativeBigInt : false; + + if (typeof options.constructorAction !== 'undefined') { + if ( + options.constructorAction === 'error' || + options.constructorAction === 'ignore' || + options.constructorAction === 'preserve' + ) { + _options.constructorAction = options.constructorAction; + } else { + throw new Error( + `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` + ); + } + } + + if (typeof options.protoAction !== 'undefined') { + if ( + options.protoAction === 'error' || + options.protoAction === 'ignore' || + options.protoAction === 'preserve' + ) { + _options.protoAction = options.protoAction; + } else { + throw new Error( + `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` + ); + } + } + } + + var at, // The index of the current character + ch, // The current character + escapee = { + '"': '"', + '\\': '\\', + '/': '/', + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + }, + text, + error = function (m) { + // Call error when something is wrong. + + throw { + name: 'SyntaxError', + message: m, + at: at, + text: text, + }; + }, + next = function (c) { + // If a c parameter is provided, verify that it matches the current character. + + if (c && c !== ch) { + error("Expected '" + c + "' instead of '" + ch + "'"); + } + + // Get the next character. When there are no more characters, + // return the empty string. + + ch = text.charAt(at); + at += 1; + return ch; + }, + number = function () { + // Parse a number value. 
+ + var number, + string = ''; + + if (ch === '-') { + string = '-'; + next('-'); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + if (ch === '.') { + string += '.'; + while (next() && ch >= '0' && ch <= '9') { + string += ch; + } + } + if (ch === 'e' || ch === 'E') { + string += ch; + next(); + if (ch === '-' || ch === '+') { + string += ch; + next(); + } + while (ch >= '0' && ch <= '9') { + string += ch; + next(); + } + } + number = +string; + if (!isFinite(number)) { + error('Bad number'); + } else { + if (BigNumber == null) BigNumber = __nccwpck_require__(1259); + //if (number > 9007199254740992 || number < -9007199254740992) + // Bignumber has stricter check: everything with length > 15 digits disallowed + if (string.length > 15) + return _options.storeAsString + ? string + : _options.useNativeBigInt + ? BigInt(string) + : new BigNumber(string); + else + return !_options.alwaysParseAsBig + ? number + : _options.useNativeBigInt + ? BigInt(number) + : new BigNumber(number); + } + }, + string = function () { + // Parse a string value. + + var hex, + i, + string = '', + uffff; + + // When parsing for string values, we must look for " and \ characters. + + if (ch === '"') { + var startAt = at; + while (next()) { + if (ch === '"') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + return string; + } + if (ch === '\\') { + if (at - 1 > startAt) string += text.substring(startAt, at - 1); + next(); + if (ch === 'u') { + uffff = 0; + for (i = 0; i < 4; i += 1) { + hex = parseInt(next(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + string += String.fromCharCode(uffff); + } else if (typeof escapee[ch] === 'string') { + string += escapee[ch]; + } else { + break; + } + startAt = at; + } + } + } + error('Bad string'); + }, + white = function () { + // Skip whitespace. + + while (ch && ch <= ' ') { + next(); + } + }, + word = function () { + // true, false, or null. + + switch (ch) { + case 't': + next('t'); + next('r'); + next('u'); + next('e'); + return true; + case 'f': + next('f'); + next('a'); + next('l'); + next('s'); + next('e'); + return false; + case 'n': + next('n'); + next('u'); + next('l'); + next('l'); + return null; + } + error("Unexpected '" + ch + "'"); + }, + value, // Place holder for the value function. + array = function () { + // Parse an array value. + + var array = []; + + if (ch === '[') { + next('['); + white(); + if (ch === ']') { + next(']'); + return array; // empty array + } + while (ch) { + array.push(value()); + white(); + if (ch === ']') { + next(']'); + return array; + } + next(','); + white(); + } + } + error('Bad array'); + }, + object = function () { + // Parse an object value. 
+ + var key, + object = Object.create(null); + + if (ch === '{') { + next('{'); + white(); + if (ch === '}') { + next('}'); + return object; // empty object + } + while (ch) { + key = string(); + white(); + next(':'); + if ( + _options.strict === true && + Object.hasOwnProperty.call(object, key) + ) { + error('Duplicate key "' + key + '"'); + } + + if (suspectProtoRx.test(key) === true) { + if (_options.protoAction === 'error') { + error('Object contains forbidden prototype property'); + } else if (_options.protoAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else if (suspectConstructorRx.test(key) === true) { + if (_options.constructorAction === 'error') { + error('Object contains forbidden constructor property'); + } else if (_options.constructorAction === 'ignore') { + value(); + } else { + object[key] = value(); + } + } else { + object[key] = value(); + } + + white(); + if (ch === '}') { + next('}'); + return object; + } + next(','); + white(); + } + } + error('Bad object'); + }; + + value = function () { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or a word. + + white(); + switch (ch) { + case '{': + return object(); + case '[': + return array(); + case '"': + return string(); + case '-': + return number(); + default: + return ch >= '0' && ch <= '9' ? number() : word(); + } + }; + + // Return the json_parse function. It will have access to all of the above + // functions and variables. + + return function (source, reviver) { + var result; + + text = source + ''; + at = 0; + ch = ' '; + result = value(); + white(); + if (ch) { + error('Syntax error'); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + return typeof reviver === 'function' + ? (function walk(holder, key) { + var k, + v, + value = holder[key]; + if (value && typeof value === 'object') { + Object.keys(value).forEach(function (k) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + }); + } + return reviver.call(holder, key, value); + })({ '': result }, '') + : result; + }; +}; + +module.exports = json_parse; + + +/***/ }), + +/***/ 3651: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var BigNumber = __nccwpck_require__(1259); + +/* + json2.js + 2013-05-26 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. 
If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. 
+ + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON = module.exports; + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key], + isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + if (isBigNumber) { + return value; + } else { + return quote(value); + } + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + case 'bigint': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. 
The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. + + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + Object.keys(value).forEach(function(k) { + var v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + }); + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. 
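+// An illustrative round trip with the json-bigint pair bundled here: str() above emits
+// BigNumber values verbatim (unquoted), so large integers survive stringify as well as
+// parse. Sketch, assuming the published json-bigint package:
+//
+//   var JSONbig = require('json-bigint');
+//   var text = '{"balance": 123456789012345678901}';
+//   JSONbig.stringify(JSONbig.parse(text));
+//   // -> '{"balance":123456789012345678901}' (plain JSON.parse would round this value)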
+ + return str('', {'': value}); + }; + } +}()); + + +/***/ }), + +/***/ 8622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var bufferEqual = __nccwpck_require__(9732); +var Buffer = (__nccwpck_require__(3058).Buffer); +var crypto = __nccwpck_require__(6982); +var formatEcdsa = __nccwpck_require__(325); +var util = __nccwpck_require__(9023); + +var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' +var MSG_INVALID_SECRET = 'secret must be a string or buffer'; +var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; +var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; + +var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; +if (supportsKeyObjects) { + MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; + MSG_INVALID_SECRET += 'or a KeyObject'; +} + +function checkIsPublicKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.type !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.asymmetricKeyType !== 'string') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_VERIFIER_KEY); + } +}; + +function checkIsPrivateKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return; + } + + if (typeof key === 'object') { + return; + } + + throw typeError(MSG_INVALID_SIGNER_KEY); +}; + +function checkIsSecretKey(key) { + if (Buffer.isBuffer(key)) { + return; + } + + if (typeof key === 'string') { + return key; + } + + if (!supportsKeyObjects) { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key !== 'object') { + throw typeError(MSG_INVALID_SECRET); + } + + if (key.type !== 'secret') { + throw typeError(MSG_INVALID_SECRET); + } + + if (typeof key.export !== 'function') { + throw typeError(MSG_INVALID_SECRET); + } +} + +function fromBase64(base64) { + return base64 + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function toBase64(base64url) { + base64url = base64url.toString(); + + var padding = 4 - base64url.length % 4; + if (padding !== 4) { + for (var i = 0; i < padding; ++i) { + base64url += '='; + } + } + + return base64url + .replace(/\-/g, '+') + .replace(/_/g, '/'); +} + +function typeError(template) { + var args = [].slice.call(arguments, 1); + var errMsg = util.format.bind(util, template).apply(null, args); + return new TypeError(errMsg); +} + +function bufferOrString(obj) { + return Buffer.isBuffer(obj) || typeof obj === 'string'; +} + +function normalizeInput(thing) { + if (!bufferOrString(thing)) + thing = JSON.stringify(thing); + return thing; +} + +function createHmacSigner(bits) { + return function sign(thing, secret) { + checkIsSecretKey(secret); + thing = normalizeInput(thing); + var hmac = crypto.createHmac('sha' + bits, secret); + var sig = (hmac.update(thing), hmac.digest('base64')) + return fromBase64(sig); + } +} + +function createHmacVerifier(bits) { + return function verify(thing, signature, secret) { + var computedSig = createHmacSigner(bits)(thing, secret); + return bufferEqual(Buffer.from(signature), Buffer.from(computedSig)); + } +} + +function 
createKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + // Even though we are specifying "RSA" here, this works with ECDSA + // keys as well. + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); + return fromBase64(sig); + } +} + +function createKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify(publicKey, signature, 'base64'); + } +} + +function createPSSKeySigner(bits) { + return function sign(thing, privateKey) { + checkIsPrivateKey(privateKey); + thing = normalizeInput(thing); + var signer = crypto.createSign('RSA-SHA' + bits); + var sig = (signer.update(thing), signer.sign({ + key: privateKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, 'base64')); + return fromBase64(sig); + } +} + +function createPSSKeyVerifier(bits) { + return function verify(thing, signature, publicKey) { + checkIsPublicKey(publicKey); + thing = normalizeInput(thing); + signature = toBase64(signature); + var verifier = crypto.createVerify('RSA-SHA' + bits); + verifier.update(thing); + return verifier.verify({ + key: publicKey, + padding: crypto.constants.RSA_PKCS1_PSS_PADDING, + saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST + }, signature, 'base64'); + } +} + +function createECDSASigner(bits) { + var inner = createKeySigner(bits); + return function sign() { + var signature = inner.apply(null, arguments); + signature = formatEcdsa.derToJose(signature, 'ES' + bits); + return signature; + }; +} + +function createECDSAVerifer(bits) { + var inner = createKeyVerifier(bits); + return function verify(thing, signature, publicKey) { + signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); + var result = inner(thing, signature, publicKey); + return result; + }; +} + +function createNoneSigner() { + return function sign() { + return ''; + } +} + +function createNoneVerifier() { + return function verify(thing, signature) { + return signature === ''; + } +} + +module.exports = function jwa(algorithm) { + var signerFactories = { + hs: createHmacSigner, + rs: createKeySigner, + ps: createPSSKeySigner, + es: createECDSASigner, + none: createNoneSigner, + } + var verifierFactories = { + hs: createHmacVerifier, + rs: createKeyVerifier, + ps: createPSSKeyVerifier, + es: createECDSAVerifer, + none: createNoneVerifier, + } + var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); + if (!match) + throw typeError(MSG_INVALID_ALGORITHM, algorithm); + var algo = (match[1] || match[3]).toLowerCase(); + var bits = match[2]; + + return { + sign: signerFactories[algo](bits), + verify: verifierFactories[algo](bits), + } +}; + + +/***/ }), + +/***/ 3324: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +/*global exports*/ +var SignStream = __nccwpck_require__(8600); +var VerifyStream = __nccwpck_require__(4368); + +var ALGORITHMS = [ + 'HS256', 'HS384', 'HS512', + 'RS256', 'RS384', 'RS512', + 'PS256', 'PS384', 'PS512', + 'ES256', 'ES384', 'ES512' +]; + +exports.ALGORITHMS = ALGORITHMS; +exports.sign = SignStream.sign; +exports.verify = VerifyStream.verify; +exports.decode = VerifyStream.decode; +exports.isValid = VerifyStream.isValid; 
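+// A minimal sketch of the jwa factory bundled above, which returns a sign/verify pair for a
+// single algorithm; assumes the published jwa package, with illustrative inputs:
+//
+//   const jwa = require('jwa');
+//   const hs256 = jwa('HS256');
+//   const input = 'base64url(header).base64url(payload)';
+//   const signature = hs256.sign(input, 'shared-secret');
+//   hs256.verify(input, signature, 'shared-secret'); // -> true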
+exports.createSign = function createSign(opts) { + return new SignStream(opts); +}; +exports.createVerify = function createVerify(opts) { + return new VerifyStream(opts); +}; + + +/***/ }), + +/***/ 1831: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module, process*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var Stream = __nccwpck_require__(2203); +var util = __nccwpck_require__(9023); + +function DataStream(data) { + this.buffer = null; + this.writable = true; + this.readable = true; + + // No input + if (!data) { + this.buffer = Buffer.alloc(0); + return this; + } + + // Stream + if (typeof data.pipe === 'function') { + this.buffer = Buffer.alloc(0); + data.pipe(this); + return this; + } + + // Buffer or String + // or Object (assumedly a passworded key) + if (data.length || typeof data === 'object') { + this.buffer = data; + this.writable = false; + process.nextTick(function () { + this.emit('end', data); + this.readable = false; + this.emit('close'); + }.bind(this)); + return this; + } + + throw new TypeError('Unexpected data type ('+ typeof data + ')'); +} +util.inherits(DataStream, Stream); + +DataStream.prototype.write = function write(data) { + this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); + this.emit('data', data); +}; + +DataStream.prototype.end = function end(data) { + if (data) + this.write(data); + this.emit('end', data); + this.emit('close'); + this.writable = false; + this.readable = false; +}; + +module.exports = DataStream; + + +/***/ }), + +/***/ 8600: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); + +function base64url(string, encoding) { + return Buffer + .from(string, encoding) + .toString('base64') + .replace(/=/g, '') + .replace(/\+/g, '-') + .replace(/\//g, '_'); +} + +function jwsSecuredInput(header, payload, encoding) { + encoding = encoding || 'utf8'; + var encodedHeader = base64url(toString(header), 'binary'); + var encodedPayload = base64url(toString(payload), encoding); + return util.format('%s.%s', encodedHeader, encodedPayload); +} + +function jwsSign(opts) { + var header = opts.header; + var payload = opts.payload; + var secretOrKey = opts.secret || opts.privateKey; + var encoding = opts.encoding; + var algo = jwa(header.alg); + var securedInput = jwsSecuredInput(header, payload, encoding); + var signature = algo.sign(securedInput, secretOrKey); + return util.format('%s.%s', securedInput, signature); +} + +function SignStream(opts) { + var secret = opts.secret||opts.privateKey||opts.key; + var secretStream = new DataStream(secret); + this.readable = true; + this.header = opts.header; + this.encoding = opts.encoding; + this.secret = this.privateKey = this.key = secretStream; + this.payload = new DataStream(opts.payload); + this.secret.once('close', function () { + if (!this.payload.writable && this.readable) + this.sign(); + }.bind(this)); + + this.payload.once('close', function () { + if (!this.secret.writable && this.readable) + this.sign(); + }.bind(this)); +} +util.inherits(SignStream, Stream); + +SignStream.prototype.sign = function sign() { + try { + var signature = jwsSign({ + header: this.header, + payload: this.payload.buffer, + secret: this.secret.buffer, + encoding: this.encoding + }); + 
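+// The jws entry points exported above (jws.sign / jws.verify / jws.decode) wrap this
+// jwsSign helper and the jwsVerify/jwsDecode helpers defined further below; an illustrative
+// sketch, assuming the published jws package:
+//
+//   const jws = require('jws');
+//   const token = jws.sign({
+//     header: { alg: 'HS256' },
+//     payload: { hello: 'world' },
+//     secret: 'shared-secret',
+//   });                                           // -> '<header>.<payload>.<signature>'
+//   jws.verify(token, 'HS256', 'shared-secret');  // -> true
+//   jws.decode(token);                            // -> { header, payload, signature }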
this.emit('done', signature); + this.emit('data', signature); + this.emit('end'); + this.readable = false; + return signature; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +SignStream.sign = jwsSign; + +module.exports = SignStream; + + +/***/ }), + +/***/ 5126: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(181).Buffer); + +module.exports = function toString(obj) { + if (typeof obj === 'string') + return obj; + if (typeof obj === 'number' || Buffer.isBuffer(obj)) + return obj.toString(); + return JSON.stringify(obj); +}; + + +/***/ }), + +/***/ 4368: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*global module*/ +var Buffer = (__nccwpck_require__(3058).Buffer); +var DataStream = __nccwpck_require__(1831); +var jwa = __nccwpck_require__(8622); +var Stream = __nccwpck_require__(2203); +var toString = __nccwpck_require__(5126); +var util = __nccwpck_require__(9023); +var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; + +function isObject(thing) { + return Object.prototype.toString.call(thing) === '[object Object]'; +} + +function safeJsonParse(thing) { + if (isObject(thing)) + return thing; + try { return JSON.parse(thing); } + catch (e) { return undefined; } +} + +function headerFromJWS(jwsSig) { + var encodedHeader = jwsSig.split('.', 1)[0]; + return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); +} + +function securedInputFromJWS(jwsSig) { + return jwsSig.split('.', 2).join('.'); +} + +function signatureFromJWS(jwsSig) { + return jwsSig.split('.')[2]; +} + +function payloadFromJWS(jwsSig, encoding) { + encoding = encoding || 'utf8'; + var payload = jwsSig.split('.')[1]; + return Buffer.from(payload, 'base64').toString(encoding); +} + +function isValidJws(string) { + return JWS_REGEX.test(string) && !!headerFromJWS(string); +} + +function jwsVerify(jwsSig, algorithm, secretOrKey) { + if (!algorithm) { + var err = new Error("Missing algorithm parameter for jws.verify"); + err.code = "MISSING_ALGORITHM"; + throw err; + } + jwsSig = toString(jwsSig); + var signature = signatureFromJWS(jwsSig); + var securedInput = securedInputFromJWS(jwsSig); + var algo = jwa(algorithm); + return algo.verify(securedInput, signature, secretOrKey); +} + +function jwsDecode(jwsSig, opts) { + opts = opts || {}; + jwsSig = toString(jwsSig); + + if (!isValidJws(jwsSig)) + return null; + + var header = headerFromJWS(jwsSig); + + if (!header) + return null; + + var payload = payloadFromJWS(jwsSig); + if (header.typ === 'JWT' || opts.json) + payload = JSON.parse(payload, opts.encoding); + + return { + header: header, + payload: payload, + signature: signatureFromJWS(jwsSig) + }; +} + +function VerifyStream(opts) { + opts = opts || {}; + var secretOrKey = opts.secret||opts.publicKey||opts.key; + var secretStream = new DataStream(secretOrKey); + this.readable = true; + this.algorithm = opts.algorithm; + this.encoding = opts.encoding; + this.secret = this.publicKey = this.key = secretStream; + this.signature = new DataStream(opts.signature); + this.secret.once('close', function () { + if (!this.signature.writable && this.readable) + this.verify(); + }.bind(this)); + + this.signature.once('close', function () { + if (!this.secret.writable && this.readable) + this.verify(); + }.bind(this)); +} +util.inherits(VerifyStream, Stream); +VerifyStream.prototype.verify = function verify() { + try { + var valid = 
jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); + var obj = jwsDecode(this.signature.buffer, this.encoding); + this.emit('done', valid, obj); + this.emit('data', valid); + this.emit('end'); + this.readable = false; + return valid; + } catch (e) { + this.readable = false; + this.emit('error', e); + this.emit('close'); + } +}; + +VerifyStream.decode = jwsDecode; +VerifyStream.isValid = isValidJws; +VerifyStream.verify = jwsVerify; + +module.exports = VerifyStream; + + /***/ }), /***/ 9829: @@ -51564,6 +71403,137 @@ function populateMaps (extensions, types) { } +/***/ }), + +/***/ 4402: +/***/ ((module) => { + +"use strict"; + + +/** + * @param typeMap [Object] Map of MIME type -> Array[extensions] + * @param ... + */ +function Mime() { + this._types = Object.create(null); + this._extensions = Object.create(null); + + for (let i = 0; i < arguments.length; i++) { + this.define(arguments[i]); + } + + this.define = this.define.bind(this); + this.getType = this.getType.bind(this); + this.getExtension = this.getExtension.bind(this); +} + +/** + * Define mimetype -> extension mappings. Each key is a mime-type that maps + * to an array of extensions associated with the type. The first extension is + * used as the default extension for the type. + * + * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); + * + * If a type declares an extension that has already been defined, an error will + * be thrown. To suppress this error and force the extension to be associated + * with the new type, pass `force`=true. Alternatively, you may prefix the + * extension with "*" to map the type to extension, without mapping the + * extension to the type. + * + * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); + * + * + * @param map (Object) type definitions + * @param force (Boolean) if true, force overriding of existing definitions + */ +Mime.prototype.define = function(typeMap, force) { + for (let type in typeMap) { + let extensions = typeMap[type].map(function(t) { + return t.toLowerCase(); + }); + type = type.toLowerCase(); + + for (let i = 0; i < extensions.length; i++) { + const ext = extensions[i]; + + // '*' prefix = not the preferred type for this extension. So fixup the + // extension, and skip it. + if (ext[0] === '*') { + continue; + } + + if (!force && (ext in this._types)) { + throw new Error( + 'Attempt to change mapping for "' + ext + + '" extension from "' + this._types[ext] + '" to "' + type + + '". Pass `force=true` to allow this, otherwise remove "' + ext + + '" from the list of extensions for "' + type + '".' + ); + } + + this._types[ext] = type; + } + + // Use first extension as default + if (force || !this._extensions[type]) { + const ext = extensions[0]; + this._extensions[type] = (ext[0] !== '*') ? 
ext : ext.substr(1); + } + } +}; + +/** + * Lookup a mime type based on extension + */ +Mime.prototype.getType = function(path) { + path = String(path); + let last = path.replace(/^.*[/\\]/, '').toLowerCase(); + let ext = last.replace(/^.*\./, '').toLowerCase(); + + let hasPath = last.length < path.length; + let hasDot = ext.length < last.length - 1; + + return (hasDot || !hasPath) && this._types[ext] || null; +}; + +/** + * Return file extension associated with a mime type + */ +Mime.prototype.getExtension = function(type) { + type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; + return type && this._extensions[type.toLowerCase()] || null; +}; + +module.exports = Mime; + + +/***/ }), + +/***/ 4900: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +let Mime = __nccwpck_require__(4402); +module.exports = new Mime(__nccwpck_require__(3725), __nccwpck_require__(8548)); + + +/***/ }), + +/***/ 8548: +/***/ ((module) => { + +module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.template":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["w
bs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"ap
plication/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["mpp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.m
acroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"application/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/
vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compressed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-
diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql"
:["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.trans
mit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; + +/***/ }), + +/***/ 3725: +/***/ ((module) => { + +module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; + /***/ }), /***/ 3772: @@ -52518,6 +72488,175 @@ function regExpEscape (s) { } +/***/ }), + +/***/ 744: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + /***/ }), /***/ 6705: @@ -53891,10 +74030,6 @@ function getNodeRequestOptions(request) { agent = agent(parsedURL); } - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - // HTTP-network fetch step 4.2 // chunked encoding is handled by Node.js @@ -53943,6 +74078,20 @@ const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); }; +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. 
+ * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + /** * Fetch function * @@ -53974,7 +74123,7 @@ function fetch(url, opts) { let error = new AbortError('The user aborted a request.'); reject(error); if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); + destroyStream(request.body, error); } if (!response || !response.body) return; response.body.emit('error', error); @@ -54015,9 +74164,43 @@ function fetch(url, opts) { req.on('error', function (err) { reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + finalize(); }); + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. + req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + req.on('response', function (res) { clearTimeout(reqTimeout); @@ -54089,7 +74272,7 @@ function fetch(url, opts) { size: request.size }; - if (!isDomainOrSubdomain(request.url, locationURL)) { + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { requestOpts.headers.delete(name); } @@ -54182,6 +74365,13 @@ function fetch(url, opts) { response = new Response(body, response_options); resolve(response); }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. 
+ if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); return; } @@ -54201,6 +74391,44 @@ function fetch(url, opts) { writeToStream(req, request); }); } +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + /** * Redirect code matching * @@ -54221,6 +74449,7 @@ exports.Headers = Headers; exports.Request = Request; exports.Response = Response; exports.FetchError = FetchError; +exports.AbortError = AbortError; /***/ }), @@ -56385,6 +76614,3674 @@ module.exports.implForWrapper = function (wrapper) { +/***/ }), + +/***/ 5560: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(8264) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 5500: +/***/ ((module) => { + +"use strict"; + + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of 
${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.F = codes; + + +/***/ }), + +/***/ 2063: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + + + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = __nccwpck_require__(9274); +var Writable = __nccwpck_require__(8797); +__nccwpck_require__(9598)(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +/***/ }), + +/***/ 5283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + + + +module.exports = PassThrough; +var Transform = __nccwpck_require__(2337); +__nccwpck_require__(9598)(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; + +/***/ }), + +/***/ 9274: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = (__nccwpck_require__(4434).EventEmitter); +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = __nccwpck_require__(9023); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = __nccwpck_require__(7336); +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +__nccwpck_require__(9598)(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. 
+ if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = (__nccwpck_require__(634)/* .StringDecoder */ .I); + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
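+  // At this point everything is wired up: ondata pauses the source when
+  // dest.write() returns false, and the ondrain handler installed above
+  // (see pipeOnDrain below) restarts the flow once the destination drains.
+  // A hedged usage sketch, assuming Node's fs module:
+  //   fs.createReadStream('in.txt').pipe(fs.createWriteStream('out.txt'));
+  // gets this pause/drain backpressure loop for free.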
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = __nccwpck_require__(4868); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = __nccwpck_require__(4659); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} + +/***/ }), + +/***/ 2337: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. 
However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + + + +module.exports = Transform; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = __nccwpck_require__(2063); +__nccwpck_require__(9598)(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. 
If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} + +/***/ }), + +/***/ 8797: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
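+// Illustrative sketch (not part of the vendored readable-stream source): the
+// usual way this class is consumed — pass a write(chunk, encoding, cb)
+// implementation and let Writable handle buffering, 'drain' and 'finish'.
+// Never invoked here; it only assumes the Writable constructor defined below.
+function exampleWritableSketch() {
+  var sink = new Writable({
+    write: function (chunk, encoding, cb) {
+      // Pretend to flush the chunk somewhere asynchronously, then call cb()
+      // so the next buffered chunk (if any) can be written.
+      process.nextTick(cb);
+    }
+  });
+  sink.write('hello');
+  sink.end('world'); // uncorks, flushes, then emits 'finish'
+  return sink;
+}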
+ + + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: __nccwpck_require__(4488) +}; +/**/ + +/**/ +var Stream = __nccwpck_require__(3283); +/**/ + +var Buffer = (__nccwpck_require__(181).Buffer); +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = __nccwpck_require__(5089); +var _require = __nccwpck_require__(4874), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +__nccwpck_require__(9598)(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || __nccwpck_require__(2063); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. 
Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. 
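+// Illustrative sketch (not part of the vendored readable-stream source): a
+// Duplex inherits from Readable, so a plain prototype check would report
+// `duplex instanceof Writable` as false. The Symbol.hasInstance override set
+// up below (where supported) fixes that by accepting any object that carries
+// a _writableState. Never invoked here.
+function exampleHasInstanceSketch(duplexStream) {
+  return duplexStream instanceof Writable; // true for Duplex instances as well
+}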
+var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || __nccwpck_require__(2063); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
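+// Illustrative sketch (not part of the vendored readable-stream source) of the
+// rule validChunk() below enforces. Never invoked here; it assumes an 'error'
+// listener would be attached in real code, since invalid chunks error the stream.
+function exampleValidChunkSketch() {
+  var byteSink = new Writable({ write: function (c, e, cb) { cb(); } });
+  byteSink.write(null);           // errors with ERR_STREAM_NULL_VALUES
+  byteSink.write({ nope: true }); // errors with ERR_INVALID_ARG_TYPE
+
+  var objectSink = new Writable({ objectMode: true, write: function (c, e, cb) { cb(); } });
+  objectSink.write({ fine: true }); // accepted: object mode allows arbitrary values
+}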
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
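+// Illustrative sketch (not part of the vendored readable-stream source): the
+// consumer pattern that ordering guarantee exists for. Because the callback is
+// deferred to the next tick, write()'s `false` return value is always observed
+// before the matching 'drain' event can fire. Never invoked here.
+function exampleDrainSketch(writable, chunks) {
+  function writeSome() {
+    var ok = true;
+    while (ok && chunks.length) {
+      ok = writable.write(chunks.shift());
+    }
+    // Buffer is over the highWaterMark; wait for it to empty before continuing.
+    if (!ok) writable.once('drain', writeSome);
+  }
+  writeSome();
+}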
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; + +/***/ }), + +/***/ 4868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var finished = __nccwpck_require__(6815); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; + +/***/ }), + +/***/ 7336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if 
(Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = __nccwpck_require__(181), + Buffer = _require.Buffer; +var _require2 = __nccwpck_require__(9023), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); + +/***/ }), + +/***/ 5089: +/***/ ((module) => { + +"use strict"; + + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. 
+ + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; + +/***/ }), + +/***/ 6815: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + + + +var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; + +/***/ }), + +/***/ 4659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +function 
asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. 
+ var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; + + +/***/ }), + +/***/ 6701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + + + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = (__nccwpck_require__(5500)/* .codes */ .F), + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = __nccwpck_require__(6815); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; + +/***/ }), + +/***/ 4874: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(5500)/* .codes */ .F).ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? 
options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; + +/***/ }), + +/***/ 3283: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(2203); + + +/***/ }), + +/***/ 6131: +/***/ ((module, exports, __nccwpck_require__) => { + +var Stream = __nccwpck_require__(2203); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = __nccwpck_require__(9274); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = __nccwpck_require__(8797); + exports.Duplex = __nccwpck_require__(2063); + exports.Transform = __nccwpck_require__(2337); + exports.PassThrough = __nccwpck_require__(5283); + exports.finished = __nccwpck_require__(6815); + exports.pipeline = __nccwpck_require__(6701); +} + + +/***/ }), + +/***/ 7842: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const {PassThrough} = __nccwpck_require__(2203); +const extend = __nccwpck_require__(3860); + +let debug = () => {}; +if ( + typeof process !== 'undefined' && + 'env' in process && + typeof process.env === 'object' && + process.env.DEBUG === 'retry-request' +) { + debug = message => { + console.log('retry-request:', message); + }; +} + +const DEFAULTS = { + objectMode: false, + retries: 2, + + /* + The maximum time to delay in seconds. If retryDelayMultiplier results in a + delay greater than maxRetryDelay, retries should delay by maxRetryDelay + seconds instead. + */ + maxRetryDelay: 64, + + /* + The multiplier by which to increase the delay time between the completion of + failed requests, and the initiation of the subsequent retrying request. + */ + retryDelayMultiplier: 2, + + /* + The length of time to keep retrying in seconds. The last sleep period will + be shortened as necessary, so that the last retry runs at deadline (and not + considerably beyond it). The total time starting from when the initial + request is sent, after which an error will be returned, regardless of the + retrying attempts made meanwhile. + */ + totalTimeout: 600, + + noResponseRetries: 2, + currentRetryAttempt: 0, + shouldRetryFn: function (response) { + const retryRanges = [ + // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes + // 1xx - Retry (Informational, request still processing) + // 2xx - Do not retry (Success) + // 3xx - Do not retry (Redirect) + // 4xx - Do not retry (Client errors) + // 429 - Retry ("Too Many Requests") + // 5xx - Retry (Server errors) + [100, 199], + [429, 429], + [500, 599], + ]; + + const statusCode = response.statusCode; + debug(`Response status: ${statusCode}`); + + let range; + while ((range = retryRanges.shift())) { + if (statusCode >= range[0] && statusCode <= range[1]) { + // Not a successful status or redirect. 
+ return true; + } + } + }, +}; + +function retryRequest(requestOpts, opts, callback) { + if (typeof requestOpts === 'string') { + requestOpts = {url: requestOpts}; + } + + const streamMode = typeof arguments[arguments.length - 1] !== 'function'; + + if (typeof opts === 'function') { + callback = opts; + } + + const manualCurrentRetryAttemptWasSet = + opts && typeof opts.currentRetryAttempt === 'number'; + opts = extend({}, DEFAULTS, opts); + + if (typeof opts.request === 'undefined') { + throw new Error('A request library must be provided to retry-request.'); + } + + let currentRetryAttempt = opts.currentRetryAttempt; + + let numNoResponseAttempts = 0; + let streamResponseHandled = false; + + let retryStream; + let requestStream; + let delayStream; + + let activeRequest; + const retryRequest = { + abort: function () { + if (activeRequest && activeRequest.abort) { + activeRequest.abort(); + } + }, + }; + + if (streamMode) { + retryStream = new PassThrough({objectMode: opts.objectMode}); + retryStream.abort = resetStreams; + } + + const timeOfFirstRequest = Date.now(); + if (currentRetryAttempt > 0) { + retryAfterDelay(currentRetryAttempt); + } else { + makeRequest(); + } + + if (streamMode) { + return retryStream; + } else { + return retryRequest; + } + + function resetStreams() { + delayStream = null; + + if (requestStream) { + requestStream.abort && requestStream.abort(); + requestStream.cancel && requestStream.cancel(); + + if (requestStream.destroy) { + requestStream.destroy(); + } else if (requestStream.end) { + requestStream.end(); + } + } + } + + function makeRequest() { + let finishHandled = false; + currentRetryAttempt++; + debug(`Current retry attempt: ${currentRetryAttempt}`); + + function handleFinish(args = []) { + if (!finishHandled) { + finishHandled = true; + retryStream.emit('complete', ...args); + } + } + + if (streamMode) { + streamResponseHandled = false; + + delayStream = new PassThrough({objectMode: opts.objectMode}); + requestStream = opts.request(requestOpts); + + setImmediate(() => { + retryStream.emit('request'); + }); + + requestStream + // gRPC via google-cloud-node can emit an `error` as well as a `response` + // Whichever it emits, we run with-- we can't run with both. That's what + // is up with the `streamResponseHandled` tracking. + .on('error', err => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(err); + }) + .on('response', (resp, body) => { + if (streamResponseHandled) { + return; + } + + streamResponseHandled = true; + onResponse(null, resp, body); + }) + .on('complete', (...params) => handleFinish(params)) + .on('finish', (...params) => handleFinish(params)); + + requestStream.pipe(delayStream); + } else { + activeRequest = opts.request(requestOpts, onResponse); + } + } + + function retryAfterDelay(currentRetryAttempt) { + if (streamMode) { + resetStreams(); + } + + const nextRetryDelay = getNextRetryDelay({ + maxRetryDelay: opts.maxRetryDelay, + retryDelayMultiplier: opts.retryDelayMultiplier, + retryNumber: currentRetryAttempt, + timeOfFirstRequest, + totalTimeout: opts.totalTimeout, + }); + debug(`Next retry delay: ${nextRetryDelay}`); + + if (nextRetryDelay <= 0) { + numNoResponseAttempts = opts.noResponseRetries + 1; + return; + } + + setTimeout(makeRequest, nextRetryDelay); + } + + function onResponse(err, response, body) { + // An error such as DNS resolution. 
+ if (err) { + numNoResponseAttempts++; + + if (numNoResponseAttempts <= opts.noResponseRetries) { + retryAfterDelay(numNoResponseAttempts); + } else { + if (streamMode) { + retryStream.emit('error', err); + retryStream.end(); + } else { + callback(err, response, body); + } + } + + return; + } + + // Send the response to see if we should try again. + // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts + // the very first request sent as the first "retry". It is only accurate + // when a user provides their own "currentRetryAttempt" option at + // instantiation. + const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet + ? currentRetryAttempt + : currentRetryAttempt - 1; + if ( + adjustedCurrentRetryAttempt < opts.retries && + opts.shouldRetryFn(response) + ) { + retryAfterDelay(currentRetryAttempt); + return; + } + + // No more attempts need to be made, just continue on. + if (streamMode) { + retryStream.emit('response', response); + delayStream.pipe(retryStream); + requestStream.on('error', err => { + retryStream.destroy(err); + }); + } else { + callback(err, response, body); + } + } +} + +module.exports = retryRequest; + +function getNextRetryDelay(config) { + const { + maxRetryDelay, + retryDelayMultiplier, + retryNumber, + timeOfFirstRequest, + totalTimeout, + } = config; + + const maxRetryDelayMs = maxRetryDelay * 1000; + const totalTimeoutMs = totalTimeout * 1000; + + const jitter = Math.floor(Math.random() * 1000); + const calculatedNextRetryDelay = + Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; + + const maxAllowableDelayMs = + totalTimeoutMs - (Date.now() - timeOfFirstRequest); + + return Math.min( + calculatedNextRetryDelay, + maxAllowableDelayMs, + maxRetryDelayMs + ); +} + +module.exports.defaults = DEFAULTS; +module.exports.getNextRetryDelay = getNextRetryDelay; + + +/***/ }), + +/***/ 5546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(7084); + +/***/ }), + +/***/ 7084: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +var RetryOperation = __nccwpck_require__(9538); + +exports.operation = function(options) { + var timeouts = exports.timeouts(options); + return new RetryOperation(timeouts, { + forever: options && (options.forever || options.retries === Infinity), + unref: options && options.unref, + maxRetryTime: options && options.maxRetryTime + }); +}; + +exports.timeouts = function(options) { + if (options instanceof Array) { + return [].concat(options); + } + + var opts = { + retries: 10, + factor: 2, + minTimeout: 1 * 1000, + maxTimeout: Infinity, + randomize: false + }; + for (var key in options) { + opts[key] = options[key]; + } + + if (opts.minTimeout > opts.maxTimeout) { + throw new Error('minTimeout is greater than maxTimeout'); + } + + var timeouts = []; + for (var i = 0; i < opts.retries; i++) { + timeouts.push(this.createTimeout(i, opts)); + } + + if (options && options.forever && !timeouts.length) { + timeouts.push(this.createTimeout(i, opts)); + } + + // sort the array numerically ascending + timeouts.sort(function(a,b) { + return a - b; + }); + + return timeouts; +}; + +exports.createTimeout = function(attempt, opts) { + var random = (opts.randomize) + ? 
(Math.random() + 1) + : 1; + + var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); + timeout = Math.min(timeout, opts.maxTimeout); + + return timeout; +}; + +exports.wrap = function(obj, options, methods) { + if (options instanceof Array) { + methods = options; + options = null; + } + + if (!methods) { + methods = []; + for (var key in obj) { + if (typeof obj[key] === 'function') { + methods.push(key); + } + } + } + + for (var i = 0; i < methods.length; i++) { + var method = methods[i]; + var original = obj[method]; + + obj[method] = function retryWrapper(original) { + var op = exports.operation(options); + var args = Array.prototype.slice.call(arguments, 1); + var callback = args.pop(); + + args.push(function(err) { + if (op.retry(err)) { + return; + } + if (err) { + arguments[0] = op.mainError(); + } + callback.apply(this, arguments); + }); + + op.attempt(function() { + original.apply(obj, args); + }); + }.bind(obj, original); + obj[method].options = options; + } +}; + + +/***/ }), + +/***/ 9538: +/***/ ((module) => { + +function RetryOperation(timeouts, options) { + // Compatibility for the old (timeouts, retryForever) signature + if (typeof options === 'boolean') { + options = { forever: options }; + } + + this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); + this._timeouts = timeouts; + this._options = options || {}; + this._maxRetryTime = options && options.maxRetryTime || Infinity; + this._fn = null; + this._errors = []; + this._attempts = 1; + this._operationTimeout = null; + this._operationTimeoutCb = null; + this._timeout = null; + this._operationStart = null; + this._timer = null; + + if (this._options.forever) { + this._cachedTimeouts = this._timeouts.slice(0); + } +} +module.exports = RetryOperation; + +RetryOperation.prototype.reset = function() { + this._attempts = 1; + this._timeouts = this._originalTimeouts.slice(0); +} + +RetryOperation.prototype.stop = function() { + if (this._timeout) { + clearTimeout(this._timeout); + } + if (this._timer) { + clearTimeout(this._timer); + } + + this._timeouts = []; + this._cachedTimeouts = null; +}; + +RetryOperation.prototype.retry = function(err) { + if (this._timeout) { + clearTimeout(this._timeout); + } + + if (!err) { + return false; + } + var currentTime = new Date().getTime(); + if (err && currentTime - this._operationStart >= this._maxRetryTime) { + this._errors.push(err); + this._errors.unshift(new Error('RetryOperation timeout occurred')); + return false; + } + + this._errors.push(err); + + var timeout = this._timeouts.shift(); + if (timeout === undefined) { + if (this._cachedTimeouts) { + // retry forever, only keep last error + this._errors.splice(0, this._errors.length - 1); + timeout = this._cachedTimeouts.slice(-1); + } else { + return false; + } + } + + var self = this; + this._timer = setTimeout(function() { + self._attempts++; + + if (self._operationTimeoutCb) { + self._timeout = setTimeout(function() { + self._operationTimeoutCb(self._attempts); + }, self._operationTimeout); + + if (self._options.unref) { + self._timeout.unref(); + } + } + + self._fn(self._attempts); + }, timeout); + + if (this._options.unref) { + this._timer.unref(); + } + + return true; +}; + +RetryOperation.prototype.attempt = function(fn, timeoutOps) { + this._fn = fn; + + if (timeoutOps) { + if (timeoutOps.timeout) { + this._operationTimeout = timeoutOps.timeout; + } + if (timeoutOps.cb) { + this._operationTimeoutCb = timeoutOps.cb; + } + } + + var self = this; + if 
(this._operationTimeoutCb) { + this._timeout = setTimeout(function() { + self._operationTimeoutCb(); + }, self._operationTimeout); + } + + this._operationStart = new Date().getTime(); + + this._fn(this._attempts); +}; + +RetryOperation.prototype.try = function(fn) { + console.log('Using RetryOperation.try() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = function(fn) { + console.log('Using RetryOperation.start() is deprecated'); + this.attempt(fn); +}; + +RetryOperation.prototype.start = RetryOperation.prototype.try; + +RetryOperation.prototype.errors = function() { + return this._errors; +}; + +RetryOperation.prototype.attempts = function() { + return this._attempts; +}; + +RetryOperation.prototype.mainError = function() { + if (this._errors.length === 0) { + return null; + } + + var counts = {}; + var mainError = null; + var mainErrorCount = 0; + + for (var i = 0; i < this._errors.length; i++) { + var error = this._errors[i]; + var message = error.message; + var count = (counts[message] || 0) + 1; + + counts[message] = count; + + if (count >= mainErrorCount) { + mainError = error; + mainErrorCount = count; + } + } + + return mainError; +}; + + +/***/ }), + +/***/ 3058: +/***/ ((module, exports, __nccwpck_require__) => { + +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + /***/ }), /***/ 2560: @@ -59607,6 +83504,2463 @@ function coerce (version, options) { } +/***/ }), + +/***/ 1546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var stubs = __nccwpck_require__(3897) + +/* + * StreamEvents can be used 2 ways: + * + * 1: + * function MyStream() { + * require('stream-events').call(this) + * } + * + * 2: + * require('stream-events')(myStream) + */ +function StreamEvents(stream) { + stream = stream || this + + var cfg = { + callthrough: true, + calls: 1 + } + + stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) + stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) + + return stream +} + +module.exports = StreamEvents + + +/***/ 
}), + +/***/ 4633: +/***/ ((module) => { + +module.exports = shift + +function shift (stream) { + var rs = stream._readableState + if (!rs) return null + return (rs.objectMode || typeof stream._duplexState === 'number') ? stream.read() : stream.read(getStateLength(rs)) +} + +function getStateLength (state) { + if (state.buffer.length) { + var idx = state.bufferIndex || 0 + // Since node 6.3.0 state.buffer is a BufferList not an array + if (state.buffer.head) { + return state.buffer.head.data.length + } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { + return state.buffer[idx].length + } + } + + return state.length +} + + +/***/ }), + +/***/ 634: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +/**/ + +var Buffer = (__nccwpck_require__(6132).Buffer); +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
+exports.I = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} + +/***/ }), + +/***/ 6132: +/***/ ((module, exports, __nccwpck_require__) => { + +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = __nccwpck_require__(181) +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} + + +/***/ }), + +/***/ 6496: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/; +// const octRegex = /^0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +const consider = { + hex : true, + // oct: false, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true, + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if(str==="0") return 0; + else if (options.hex && hexRegex.test(trimmedStr)) { + return parse_int(trimmedStr, 16); + // }else if (options.oct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + }else if (trimmedStr.search(/[eE]/)!== -1) { //eNotation + const notation = trimmedStr.match(/^([-\+])?(0*)([0-9]*(\.[0-9]*)?[eE][-\+]?[0-9]+)$/); + // +00.123 => [ , '+', '00', '.123', .. + if(notation){ + // console.log(notation) + if(options.leadingZeros){ //accept with leading zeros + trimmedStr = (notation[1] || "") + notation[3]; + }else{ + if(notation[2] === "0" && notation[3][0]=== "."){ //valid number + }else{ + return str; + } + } + return options.eNotation ? Number(trimmedStr) : str; + }else{ + return str; + } + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + // +00.123 => [ , '+', '00', '.123', .. 
+ if(match){ + const sign = match[1]; + const leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else if(options.leadingZeros && leadingZeros===str) return 0; //00 + + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + return (numTrimmedByZeros === numStr) || (sign+numTrimmedByZeros === numStr) ? num : str + }else { + return (trimmedStr === numStr) || (trimmedStr === sign+numStr) ? num : str + } + } + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} + +function parse_int(numStr, base){ + //polyfill + if(parseInt) return parseInt(numStr, base); + else if(Number.parseInt) return Number.parseInt(numStr, base); + else if(window && window.parseInt) return window.parseInt(numStr, base); + else throw new Error("parseInt, Number.parseInt, window.parseInt are not supported") +} + +module.exports = toNumber; + +/***/ }), + +/***/ 3897: +/***/ ((module) => { + +"use strict"; + + +module.exports = function stubs(obj, method, cfg, stub) { + if (!obj || !method || !obj[method]) + throw new Error('You must provide an object and a key for an existing method') + + if (!stub) { + stub = cfg + cfg = {} + } + + stub = stub || function() {} + + cfg.callthrough = cfg.callthrough || false + cfg.calls = cfg.calls || 0 + + var norevert = cfg.calls === 0 + + var cached = obj[method].bind(obj) + + obj[method] = function() { + var args = [].slice.call(arguments) + var returnVal + + if (cfg.callthrough) + returnVal = cached.apply(obj, args) + + returnVal = stub.apply(obj, args) || returnVal + + if (!norevert && --cfg.calls === 0) + obj[method] = cached + + return returnVal + } +} + + +/***/ }), + +/***/ 1450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const os = __nccwpck_require__(857); +const tty = __nccwpck_require__(2018); +const hasFlag = __nccwpck_require__(3813); + +const {env} = process; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false') || + hasFlag('color=never')) { + forceColor = 0; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = 1; +} + +if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + forceColor = 1; + } else if (env.FORCE_COLOR === 'false') { + forceColor = 0; + } else 
{ + forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) +}; + + +/***/ }), + +/***/ 7745: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * @license + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; +/** + * @class TeenyStatisticsWarning + * @extends Error + * @description While an error, is used for emitting warnings when + * meeting certain configured thresholds. 
+ * @see process.emitWarning + */ +class TeenyStatisticsWarning extends Error { + /** + * @param {string} message + */ + constructor(message) { + super(message); + this.threshold = 0; + this.type = ''; + this.value = 0; + this.name = this.constructor.name; + Error.captureStackTrace(this, this.constructor); + } +} +exports.TeenyStatisticsWarning = TeenyStatisticsWarning; +TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; +/** + * @class TeenyStatistics + * @description Maintain various statistics internal to teeny-request. Tracking + * is not automatic and must be instrumented within teeny-request. + */ +class TeenyStatistics { + /** + * @param {TeenyStatisticsOptions} [opts] + */ + constructor(opts) { + /** + * @type {number} + * @private + * @default 0 + */ + this._concurrentRequests = 0; + /** + * @type {boolean} + * @private + * @default false + */ + this._didConcurrentRequestWarn = false; + this._options = TeenyStatistics._prepareOptions(opts); + } + /** + * Returns a copy of the current options. + * @return {TeenyStatisticsOptions} + */ + getOptions() { + return Object.assign({}, this._options); + } + /** + * Change configured statistics options. This will not preserve unspecified + * options that were previously specified, i.e. this is a reset of options. + * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsConfig} The previous options. + * @see _prepareOptions + */ + setOptions(opts) { + const oldOpts = this._options; + this._options = TeenyStatistics._prepareOptions(opts); + return oldOpts; + } + /** + * @readonly + * @return {TeenyStatisticsCounters} + */ + get counters() { + return { + concurrentRequests: this._concurrentRequests, + }; + } + /** + * @description Should call this right before making a request. + */ + requestStarting() { + this._concurrentRequests++; + if (this._options.concurrentRequests > 0 && + this._concurrentRequests >= this._options.concurrentRequests && + !this._didConcurrentRequestWarn) { + this._didConcurrentRequestWarn = true; + const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + + this._concurrentRequests + + ' requests in-flight, which exceeds the configured threshold of ' + + this._options.concurrentRequests + + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + + 'variable or the concurrentRequests option of teeny-request to ' + + 'increase or disable (0) this warning.'); + warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; + warning.value = this._concurrentRequests; + warning.threshold = this._options.concurrentRequests; + process.emitWarning(warning); + } + } + /** + * @description When using `requestStarting`, call this after the request + * has finished. + */ + requestFinished() { + // TODO negative? + this._concurrentRequests--; + } + /** + * Configuration Precedence: + * 1. Dependency inversion via defined option. + * 2. Global numeric environment variable. + * 3. Built-in default. + * This will not preserve unspecified options previously specified. 
+ * @param {TeenyStatisticsOptions} [opts] + * @returns {TeenyStatisticsOptions} + * @private + */ + static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { + let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; + const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); + if (diConcurrentRequests !== undefined) { + concurrentRequests = diConcurrentRequests; + } + else if (!Number.isNaN(envConcurrentRequests)) { + concurrentRequests = envConcurrentRequests; + } + return { concurrentRequests }; + } +} +exports.TeenyStatistics = TeenyStatistics; +/** + * @description A default threshold representing when to warn about excessive + * in-flight/concurrent requests. + * @type {number} + * @static + * @readonly + * @default 5000 + */ +TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; +//# sourceMappingURL=TeenyStatistics.js.map + +/***/ }), + +/***/ 4003: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2019 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAgent = exports.pool = void 0; +const http_1 = __nccwpck_require__(8611); +const https_1 = __nccwpck_require__(5692); +// eslint-disable-next-line node/no-deprecated-api +const url_1 = __nccwpck_require__(7016); +exports.pool = new Map(); +/** + * Determines if a proxy should be considered based on the environment. + * + * @param uri The request uri + * @returns {boolean} + */ +function shouldUseProxyForURI(uri) { + const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; + if (!noProxyEnv) { + return true; + } + const givenURI = new URL(uri); + for (const noProxyRaw of noProxyEnv.split(',')) { + const noProxy = noProxyRaw.trim(); + if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { + return false; + } + else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { + const noProxyWildcard = noProxy.replace(/^\*\./, '.'); + if (givenURI.hostname.endsWith(noProxyWildcard)) { + return false; + } + } + } + return true; +} +/** + * Returns a custom request Agent if one is found, otherwise returns undefined + * which will result in the global http(s) Agent being used. + * @private + * @param {string} uri The request uri + * @param {Options} reqOpts The request options + * @returns {HttpAnyAgent|undefined} + */ +function getAgent(uri, reqOpts) { + const isHttp = uri.startsWith('http://'); + const proxy = reqOpts.proxy || + process.env.HTTP_PROXY || + process.env.http_proxy || + process.env.HTTPS_PROXY || + process.env.https_proxy; + const poolOptions = Object.assign({}, reqOpts.pool); + const manuallyProvidedProxy = !!reqOpts.proxy; + const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); + if (proxy && shouldUseProxy) { + // tslint:disable-next-line variable-name + const Agent = isHttp + ? 
__nccwpck_require__(1970) + : __nccwpck_require__(6518); + const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; + return new Agent(proxyOpts); + } + let key = isHttp ? 'http' : 'https'; + if (reqOpts.forever) { + key += ':forever'; + if (!exports.pool.has(key)) { + // tslint:disable-next-line variable-name + const Agent = isHttp ? http_1.Agent : https_1.Agent; + exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); + } + } + return exports.pool.get(key); +} +exports.getAgent = getAgent; +//# sourceMappingURL=agents.js.map + +/***/ }), + +/***/ 321: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +/** + * @license + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.teenyRequest = exports.RequestError = void 0; +const node_fetch_1 = __nccwpck_require__(6705); +const stream_1 = __nccwpck_require__(2203); +const uuid = __nccwpck_require__(8993); +const agents_1 = __nccwpck_require__(4003); +const TeenyStatistics_1 = __nccwpck_require__(7745); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const streamEvents = __nccwpck_require__(1546); +class RequestError extends Error { +} +exports.RequestError = RequestError; +/** + * Convert options from Request to Fetch format + * @private + * @param reqOpts Request options + */ +function requestToFetchOptions(reqOpts) { + const options = { + method: reqOpts.method || 'GET', + ...(reqOpts.timeout && { timeout: reqOpts.timeout }), + ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), + }; + if (typeof reqOpts.json === 'object') { + // Add Content-type: application/json header + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['Content-Type'] = 'application/json'; + // Set body to JSON representation of value + options.body = JSON.stringify(reqOpts.json); + } + else { + if (Buffer.isBuffer(reqOpts.body)) { + options.body = reqOpts.body; + } + else if (typeof reqOpts.body !== 'string') { + options.body = JSON.stringify(reqOpts.body); + } + else { + options.body = reqOpts.body; + } + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + options.headers = reqOpts.headers; + let uri = (reqOpts.uri || + reqOpts.url); + if (!uri) { + throw new Error('Missing uri or url in reqOpts.'); + } + if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const qs = __nccwpck_require__(3480); + const params = qs.stringify(reqOpts.qs); + uri = uri + '?' + params; + } + options.agent = (0, agents_1.getAgent)(uri, reqOpts); + return { uri, options }; +} +/** + * Convert a response from `fetch` to `request` format. + * @private + * @param opts The `request` options used to create the request. 
+ * @param res The Fetch response + * @returns A `request` response object + */ +function fetchToRequestResponse(opts, res) { + const request = {}; + request.agent = opts.agent || false; + request.headers = (opts.headers || {}); + request.href = res.url; + // headers need to be converted from a map to an obj + const resHeaders = {}; + res.headers.forEach((value, key) => (resHeaders[key] = value)); + const response = Object.assign(res.body, { + statusCode: res.status, + statusMessage: res.statusText, + request, + body: res.body, + headers: resHeaders, + toJSON: () => ({ headers: resHeaders }), + }); + return response; +} +/** + * Create POST body from two parts as multipart/related content-type + * @private + * @param boundary + * @param multipart + */ +function createMultipartStream(boundary, multipart) { + const finale = `--${boundary}--`; + const stream = new stream_1.PassThrough(); + for (const part of multipart) { + const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; + stream.write(preamble); + if (typeof part.body === 'string') { + stream.write(part.body); + stream.write('\r\n'); + } + else { + part.body.pipe(stream, { end: false }); + part.body.on('end', () => { + stream.write('\r\n'); + stream.write(finale); + stream.end(); + }); + } + } + return stream; +} +function teenyRequest(reqOpts, callback) { + const { uri, options } = requestToFetchOptions(reqOpts); + const multipart = reqOpts.multipart; + if (reqOpts.multipart && multipart.length === 2) { + if (!callback) { + // TODO: add support for multipart uploads through streaming + throw new Error('Multipart without callback is not implemented.'); + } + const boundary = uuid.v4(); + options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; + options.body = createMultipartStream(boundary, multipart); + // Multipart upload + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, (err) => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; + } + if (callback === undefined) { + // Stream mode + const requestStream = streamEvents(new stream_1.PassThrough()); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let responseStream; + requestStream.once('reading', () => { + if (responseStream) { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + } + else { + requestStream.once('response', () => { + (0, stream_1.pipeline)(responseStream, requestStream, () => { }); + }); + } + }); + options.compress = false; + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + responseStream = res.body; + responseStream.on('error', (err) => { + requestStream.emit('error', err); + }); + const response = fetchToRequestResponse(options, res); + requestStream.emit('response', response); + }, err => { + teenyRequest.stats.requestFinished(); + requestStream.emit('error', err); 
+ }); + // fetch doesn't supply the raw HTTP stream, instead it + // returns a PassThrough piped from the HTTP response + // stream. + return requestStream; + } + // GET or POST with callback + teenyRequest.stats.requestStarting(); + (0, node_fetch_1.default)(uri, options).then(res => { + teenyRequest.stats.requestFinished(); + const header = res.headers.get('content-type'); + const response = fetchToRequestResponse(options, res); + const body = response.body; + if (header === 'application/json' || + header === 'application/json; charset=utf-8') { + if (response.statusCode === 204) { + // Probably a DELETE + callback(null, response, body); + return; + } + res.json().then(json => { + response.body = json; + callback(null, response, json); + }, err => { + callback(err, response, body); + }); + return; + } + res.text().then(text => { + const response = fetchToRequestResponse(options, res); + response.body = text; + callback(null, response, text); + }, err => { + callback(err, response, body); + }); + }, err => { + teenyRequest.stats.requestFinished(); + callback(err, null, null); + }); + return; +} +exports.teenyRequest = teenyRequest; +teenyRequest.defaults = (defaults) => { + return (reqOpts, callback) => { + const opts = { ...defaults, ...reqOpts }; + if (callback === undefined) { + return teenyRequest(opts); + } + teenyRequest(opts, callback); + }; +}; +/** + * Single instance of an interface for keeping track of things. + */ +teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); +teenyRequest.resetStats = () => { + teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); +}; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7954: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = __nccwpck_require__(4434); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const promisify_1 = __importDefault(__nccwpck_require__(4446)); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4446: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports["default"] = promisify; +//# sourceMappingURL=promisify.js.map + +/***/ }), + +/***/ 5299: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const net_1 = __importDefault(__nccwpck_require__(9278)); +const tls_1 = __importDefault(__nccwpck_require__(4756)); +const url_1 = __importDefault(__nccwpck_require__(7016)); +const assert_1 = __importDefault(__nccwpck_require__(2613)); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const agent_base_1 = __nccwpck_require__(7954); +const parse_proxy_response_1 = __importDefault(__nccwpck_require__(7742)); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. 
+ let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. + let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports["default"] = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map + +/***/ }), + +/***/ 6518: +/***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(__nccwpck_require__(5299)); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7742: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const debug_1 = __importDefault(__nccwpck_require__(2830)); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports["default"] = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map + +/***/ }), + +/***/ 8993: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + 
return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(4684)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(3142)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(9655)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(880)); + +var _nil = _interopRequireDefault(__nccwpck_require__(194)); + +var _version = _interopRequireDefault(__nccwpck_require__(6257)); + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(1400)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 8061: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7966: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 194: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7814: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 1118: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 8540: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 1352: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 1400: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 4684: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 3142: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _md = _interopRequireDefault(__nccwpck_require__(8061)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 4575: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(1400); + +var _parse = _interopRequireDefault(__nccwpck_require__(7814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 9655: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(7966)); + +var _rng = _interopRequireDefault(__nccwpck_require__(8540)); + +var _stringify = __nccwpck_require__(1400); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(4575)); + +var _sha = _interopRequireDefault(__nccwpck_require__(1352)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 1579: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(1118)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 6257: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(1579)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + /***/ }), /***/ 770: @@ -59887,6 +86241,705 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { exports.debug = debug; // for test +/***/ }), + +/***/ 4488: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = __nccwpck_require__(9023).deprecate; + + +/***/ }), + +/***/ 2048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6415)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(1697)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(4676)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9771)); + +var _nil = _interopRequireDefault(__nccwpck_require__(7723)); + +var _version = _interopRequireDefault(__nccwpck_require__(5868)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 216: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 7723: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 7879: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 2973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 7597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6415: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. 
See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 1697: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _md = _interopRequireDefault(__nccwpck_require__(216)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2930: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _sha = _interopRequireDefault(__nccwpck_require__(507)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6200: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(7879)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 5868: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.substr(14, 1), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 8264: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + /***/ }), /***/ 8736: @@ -64894,6 +91947,81 @@ exports.debug = debug; // for test }).call(this); +/***/ }), + +/***/ 538: +/***/ ((module) => { + +class Node { + /// value; + /// next; + + constructor(value) { + this.value = value; + + // TODO: Remove this when targeting Node.js 12. 
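+        // (Assigning `next` here keeps every Node object the same shape; the
+        // commented-out class-field declarations above would replace it once
+        // class fields can be targeted.)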
+ this.next = undefined; + } +} + +class Queue { + // TODO: Use private class fields when targeting Node.js 12. + // #_head; + // #_tail; + // #_size; + + constructor() { + this.clear(); + } + + enqueue(value) { + const node = new Node(value); + + if (this._head) { + this._tail.next = node; + this._tail = node; + } else { + this._head = node; + this._tail = node; + } + + this._size++; + } + + dequeue() { + const current = this._head; + if (!current) { + return; + } + + this._head = this._head.next; + this._size--; + return current.value; + } + + clear() { + this._head = undefined; + this._tail = undefined; + this._size = 0; + } + + get size() { + return this._size; + } + + * [Symbol.iterator]() { + let current = this._head; + + while (current) { + yield current.value; + current = current.next; + } + } +} + +module.exports = Queue; + + /***/ }), /***/ 7242: @@ -64911,25 +92039,27 @@ var Inputs; Inputs["UploadChunkSize"] = "upload-chunk-size"; Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive"; Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; - Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action -})(Inputs = exports.Inputs || (exports.Inputs = {})); + Inputs["LookupOnly"] = "lookup-only"; + Inputs["GCSBucket"] = "gcs-bucket"; + Inputs["GCSPathPrefix"] = "gcs-path-prefix"; // Input for cache, restore, save action +})(Inputs || (exports.Inputs = Inputs = {})); var Outputs; (function (Outputs) { Outputs["CacheHit"] = "cache-hit"; Outputs["CachePrimaryKey"] = "cache-primary-key"; Outputs["CacheMatchedKey"] = "cache-matched-key"; // Output from restore action -})(Outputs = exports.Outputs || (exports.Outputs = {})); +})(Outputs || (exports.Outputs = Outputs = {})); var State; (function (State) { State["CachePrimaryKey"] = "CACHE_KEY"; State["CacheMatchedKey"] = "CACHE_RESULT"; -})(State = exports.State || (exports.State = {})); +})(State || (exports.State = State = {})); var Events; (function (Events) { Events["Key"] = "GITHUB_EVENT_NAME"; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; -})(Events = exports.Events || (exports.Events = {})); +})(Events || (exports.Events = Events = {})); exports.RefKey = "GITHUB_REF"; @@ -64956,13 +92086,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -64973,8 +92113,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.saveRun = exports.saveOnlyRun = exports.saveImpl = void 0; -const cache = __importStar(__nccwpck_require__(5116)); +exports.saveImpl = saveImpl; +exports.saveOnlyRun = saveOnlyRun; +exports.saveRun = saveRun; +const cache = __importStar(__nccwpck_require__(9614)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); const stateProvider_1 = __nccwpck_require__(2879); @@ -65024,7 +92166,6 @@ function saveImpl(stateProvider) { return cacheId; }); } -exports.saveImpl = saveImpl; function saveOnlyRun(earlyExit) { return __awaiter(this, void 0, void 0, function* () { try { @@ -65049,7 +92190,6 @@ function saveOnlyRun(earlyExit) { } }); } -exports.saveOnlyRun = saveOnlyRun; function saveRun(earlyExit) { return __awaiter(this, void 0, void 0, function* () { try { @@ -65071,7 +92211,6 @@ function saveRun(earlyExit) { } }); } -exports.saveRun = saveRun; /***/ }), @@ -65097,13 +92236,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.NullStateProvider = exports.StateProvider = void 0; const core = __importStar(__nccwpck_require__(7484)); @@ -65172,15 +92321,33 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( }) : function(o, v) { o["default"] = v; }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isCacheFeatureAvailable = exports.getInputAsBool = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.isExactKeyMatch = exports.isGhes = void 0; +exports.isGhes = isGhes; +exports.isExactKeyMatch = isExactKeyMatch; +exports.logWarning = logWarning; +exports.isValidEvent = isValidEvent; +exports.getInputAsArray = getInputAsArray; +exports.getInputAsInt = getInputAsInt; +exports.getInputAsBool = getInputAsBool; +exports.isGCSAvailable = isGCSAvailable; +exports.isCacheFeatureAvailable = isCacheFeatureAvailable; const cache = __importStar(__nccwpck_require__(5116)); const core = __importStar(__nccwpck_require__(7484)); const constants_1 = __nccwpck_require__(7242); @@ -65192,25 +92359,21 @@ function isGhes() { const isLocalHost = hostname.endsWith(".LOCALHOST"); return !isGitHubHost && !isGitHubEnterpriseCloudHost && !isLocalHost; } -exports.isGhes = isGhes; function isExactKeyMatch(key, cacheKey) { return !!(cacheKey && cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0); } -exports.isExactKeyMatch = isExactKeyMatch; function logWarning(message) { const warningPrefix = "[warning]"; core.info(`${warningPrefix}${message}`); } -exports.logWarning = logWarning; // Cache token authorized for all events that are tied to a ref // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context function isValidEvent() { return constants_1.RefKey in process.env && Boolean(process.env[constants_1.RefKey]); } -exports.isValidEvent = isValidEvent; function getInputAsArray(name, options) { return core .getInput(name, options) @@ -65218,7 +92381,6 @@ function getInputAsArray(name, options) { .map(s => s.replace(/^!\s+/, "!").trim()) .filter(x => x !== ""); } -exports.getInputAsArray = getInputAsArray; function getInputAsInt(name, options) { const value = parseInt(core.getInput(name, options)); if (isNaN(value) || value < 0) { @@ -65226,13 +92388,31 @@ function getInputAsInt(name, options) { } return value; } -exports.getInputAsInt = getInputAsInt; function getInputAsBool(name, options) { const result = core.getInput(name, options); return result.toLowerCase() === "true"; } -exports.getInputAsBool = getInputAsBool; +// Check if GCS is configured and available +function isGCSAvailable() { + try { + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + if (!bucket) { + core.info("GCS bucket name not provided, falling back to GitHub 
cache"); + return false; + } + return true; + } + catch (error) { + logWarning(`Failed to check GCS availability: ${error.message}`); + return false; + } +} function isCacheFeatureAvailable() { + // Check if GCS cache is available + if (isGCSAvailable()) { + return true; + } + // Otherwise, check GitHub cache if (cache.isFeatureAvailable()) { return true; } @@ -65244,7 +92424,242 @@ Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."); return false; } -exports.isCacheFeatureAvailable = isCacheFeatureAvailable; + + +/***/ }), + +/***/ 9614: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.restoreCache = restoreCache;
+exports.saveCache = saveCache;
+exports.isFeatureAvailable = isFeatureAvailable;
+const core = __importStar(__nccwpck_require__(7484));
+const path = __importStar(__nccwpck_require__(6928));
+const constants_1 = __nccwpck_require__(7242);
+const actionUtils_1 = __nccwpck_require__(8270);
+const utils = __importStar(__nccwpck_require__(8299));
+const storage_1 = __nccwpck_require__(8525);
+const cache = __importStar(__nccwpck_require__(5116));
+const tar_1 = __nccwpck_require__(5321);
+const DEFAULT_PATH_PREFIX = "github-cache";
+// Function to initialize GCS client using Application Default Credentials
+function getGCSClient() {
+    try {
+        core.info("Initializing GCS client");
+        return new storage_1.Storage();
+    }
+    catch (error) {
+        core.warning(`Failed to initialize GCS client: ${error.message}`);
+        return null;
+    }
+}
+function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive) {
+    return __awaiter(this, void 0, void 0, function* () {
+        // Check if GCS is available
+        if ((0, actionUtils_1.isGCSAvailable)()) {
+            try {
+                const result = yield restoreFromGCS(paths, primaryKey, restoreKeys, options);
+                if (result) {
+                    core.info(`Cache restored from GCS with key: ${result}`);
+                    return result;
+                }
+                core.info("Cache not found in GCS, falling back to GitHub cache");
+            }
+            catch (error) {
+                core.warning(`Failed to restore from GCS: ${error.message}`);
+                core.info("Falling back to GitHub cache");
+            }
+        }
+        // Fall back to GitHub cache
+        return yield cache.restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive);
+    });
+}
+function saveCache(paths, key, options, enableCrossOsArchive) {
+    return __awaiter(this, void 0, void 0, function* () {
+        if ((0, actionUtils_1.isGCSAvailable)()) {
+            try {
+                const result = yield saveToGCS(paths, key);
+                // saveToGCS resolves to -1 on failure, so only non-error results count as saved
+                if (result !== -1) {
+                    core.info(`Cache saved to GCS with key: ${key}`);
+                    return result; // Success ID
+                }
+                core.warning("Failed to save to GCS, falling back to GitHub cache");
+            }
+            catch (error) {
+                core.warning(`Failed to save to GCS: ${error.message}`);
+                core.info("Falling back to GitHub cache");
+            }
+        }
+        // Fall back to GitHub cache
+        return yield cache.saveCache(paths, key, options, enableCrossOsArchive);
+    });
+}
+// Function that checks if the cache feature is available (either GCS or GitHub cache)
+function isFeatureAvailable() {
+    return (0, actionUtils_1.isGCSAvailable)() || cache.isFeatureAvailable();
+}
+function restoreFromGCS(_paths_1, primaryKey_1) {
+    return __awaiter(this, arguments, void 0, function* (_paths, // validate paths?
+ primaryKey, restoreKeys = [], options) { + const storage = getGCSClient(); + if (!storage) { + return undefined; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const keys = [primaryKey, ...restoreKeys]; + const gcsPath = yield findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod); + if (!gcsPath) { + core.info(`No matching cache found`); + return undefined; + } + // If lookup only, just return the key + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core.info(`Cache found in GCS with key: ${gcsPath}`); + return gcsPath; + } + try { + core.info(`Downloading from GCS: ${bucket}/${gcsPath}`); + const file = storage.bucket(bucket).file(gcsPath); + yield file.download({ destination: archivePath }); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core.info('Cache restored successfully'); + return gcsPath; + } + catch (error) { + core.warning(`Failed to restore: ${error.message}`); + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function getGCSPath(pathPrefix, key, compressionMethod) { + return `${pathPrefix}/${key}.${utils.getCacheFileName(compressionMethod)}`; +} +function saveToGCS(paths, key) { + return __awaiter(this, void 0, void 0, function* () { + let cacheId = -1; + const storage = getGCSClient(); + if (!storage) { + return cacheId; + } + const bucket = core.getInput(constants_1.Inputs.GCSBucket); + const pathPrefix = core.getInput(constants_1.Inputs.GCSPathPrefix) || DEFAULT_PATH_PREFIX; + const compressionMethod = yield utils.getCompressionMethod(); + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + core.info(`Uploading to GCS: ${bucket}/${gcsPath}`); + yield storage.bucket(bucket).upload(archivePath, { + destination: gcsPath, + resumable: true, // this may not be necessary + }); + return 1; + } + catch (error) { + core.warning(`Error creating or uploading cache: ${error.message}`); + return -1; + } + finally { + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + }); +} +function findFileOnGCS(storage, bucket, pathPrefix, keys, compressionMethod) { + return __awaiter(this, void 0, void 
0, function* () { + for (const key of keys) { + const gcsPath = getGCSPath(pathPrefix, key, compressionMethod); + if (yield checkFileExists(storage, bucket, gcsPath)) { + core.info(`Found file on bucket: ${bucket} with key: ${gcsPath}`); + return gcsPath; + } + } + return undefined; + }); +} +function checkFileExists(storage, bucket, path) { + return __awaiter(this, void 0, void 0, function* () { + const [exists] = yield storage.bucket(bucket).file(path).exists(); + return exists; + }); +} /***/ }), @@ -65329,6 +92744,30 @@ module.exports = require("net"); /***/ }), +/***/ 8474: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 1708: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:process"); + +/***/ }), + +/***/ 7975: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + /***/ 857: /***/ ((module) => { @@ -65353,6 +92792,14 @@ module.exports = require("punycode"); /***/ }), +/***/ 3480: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + /***/ 2203: /***/ ((module) => { @@ -65385,6 +92832,14 @@ module.exports = require("tls"); /***/ }), +/***/ 2018: +/***/ ((module) => { + +"use strict"; +module.exports = require("tty"); + +/***/ }), + /***/ 7016: /***/ ((module) => { @@ -65407,6 +92862,14404 @@ module.exports = require("util"); "use strict"; module.exports = require("zlib"); +/***/ }), + +/***/ 6637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AclRoleAccessorMethods = exports.Acl = void 0; +const promisify_1 = __nccwpck_require__(206); +/** + * Attach functionality to a {@link Storage.acl} instance. This will add an + * object for each role group (owners, readers, and writers), with each object + * containing methods to add or delete a type of entity. + * + * As an example, here are a few methods that are created. + * + * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); + * + * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); + * + * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); + * + * @private + */ +class AclRoleAccessorMethods { + constructor() { + this.owners = {}; + this.readers = {}; + this.writers = {}; + /** + * An object of convenience methods to add or delete owner ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.owners.addAllAuthenticatedUsers` + * - `myFile.acl.owners.deleteAllAuthenticatedUsers` + * - `myFile.acl.owners.addAllUsers` + * - `myFile.acl.owners.deleteAllUsers` + * - `myFile.acl.owners.addDomain` + * - `myFile.acl.owners.deleteDomain` + * - `myFile.acl.owners.addGroup` + * - `myFile.acl.owners.deleteGroup` + * - `myFile.acl.owners.addProject` + * - `myFile.acl.owners.deleteProject` + * - `myFile.acl.owners.addUser` + * - `myFile.acl.owners.deleteUser` + * + * @name Acl#owners + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as an owner of a file. + * //- + * const myBucket = gcs.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.owners.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.owners = {}; + /** + * An object of convenience methods to add or delete reader ACL permissions + * for a given entity. + * + * The supported methods include: + * + * - `myFile.acl.readers.addAllAuthenticatedUsers` + * - `myFile.acl.readers.deleteAllAuthenticatedUsers` + * - `myFile.acl.readers.addAllUsers` + * - `myFile.acl.readers.deleteAllUsers` + * - `myFile.acl.readers.addDomain` + * - `myFile.acl.readers.deleteDomain` + * - `myFile.acl.readers.addGroup` + * - `myFile.acl.readers.deleteGroup` + * - `myFile.acl.readers.addProject` + * - `myFile.acl.readers.deleteProject` + * - `myFile.acl.readers.addUser` + * - `myFile.acl.readers.deleteUser` + * + * @name Acl#readers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a reader of a file. + * //- + * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.READER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.readers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.readers = {}; + /** + * An object of convenience methods to add or delete writer ACL permissions + * for a given entity. 
+ * + * The supported methods include: + * + * - `myFile.acl.writers.addAllAuthenticatedUsers` + * - `myFile.acl.writers.deleteAllAuthenticatedUsers` + * - `myFile.acl.writers.addAllUsers` + * - `myFile.acl.writers.deleteAllUsers` + * - `myFile.acl.writers.addDomain` + * - `myFile.acl.writers.deleteDomain` + * - `myFile.acl.writers.addGroup` + * - `myFile.acl.writers.deleteGroup` + * - `myFile.acl.writers.addProject` + * - `myFile.acl.writers.deleteProject` + * - `myFile.acl.writers.addUser` + * - `myFile.acl.writers.deleteUser` + * + * @name Acl#writers + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * //- + * // Add a user as a writer of a file. + * //- + * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) + * {}); + * + * //- + * // For reference, the above command is the same as running the following. + * //- + * myFile.acl.add({ + * entity: 'user-email@example.com', + * role: gcs.acl.WRITER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myFile.acl.writers.addUser('email@example.com').then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + this.writers = {}; + AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); + } + _assignAccessMethods(role) { + const accessMethods = AclRoleAccessorMethods.accessMethods; + const entities = AclRoleAccessorMethods.entities; + const roleGroup = role.toLowerCase() + 's'; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[roleGroup] = entities.reduce((acc, entity) => { + const isPrefix = entity.charAt(entity.length - 1) === '-'; + accessMethods.forEach(accessMethod => { + let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); + if (isPrefix) { + method = method.replace('-', ''); + } + // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the + // more complex API of specifying an `entity` and `role`. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + acc[method] = (entityId, options, callback) => { + let apiEntity; + if (typeof options === 'function') { + callback = options; + options = {}; + } + if (isPrefix) { + apiEntity = entity + entityId; + } + else { + // If the entity is not a prefix, it is a special entity group + // that does not require further details. The accessor methods + // only accept a callback. + apiEntity = entity; + callback = entityId; + } + options = Object.assign({ + entity: apiEntity, + role, + }, options); + const args = [options]; + if (typeof callback === 'function') { + args.push(callback); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return this[accessMethod].apply(this, args); + }; + }); + return acc; + }, {}); + } +} +exports.AclRoleAccessorMethods = AclRoleAccessorMethods; +AclRoleAccessorMethods.accessMethods = ['add', 'delete']; +AclRoleAccessorMethods.entities = [ + // Special entity groups that do not require further specification. + 'allAuthenticatedUsers', + 'allUsers', + // Entity groups that require specification, e.g. `user-email@example.com`. + 'domain-', + 'group-', + 'project-', + 'user-', +]; +AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. 
ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against an + * object or bucket (for example, `READ` or `WRITE`); the entity defines who the + * permission applies to (for example, a specific user or group of users). + * + * Where an `entity` value is accepted, we follow the format the Cloud Storage + * API expects. + * + * Refer to + * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls + * for the most up-to-date values. + * + * - `user-userId` + * - `user-email` + * - `group-groupId` + * - `group-email` + * - `domain-domain` + * - `project-team-projectId` + * - `allUsers` + * - `allAuthenticatedUsers` + * + * Examples: + * + * - The user "liz@example.com" would be `user-liz@example.com`. + * - The group "example@googlegroups.com" would be + * `group-example@googlegroups.com`. + * - To refer to all members of the Google Apps for Business domain + * "example.com", the entity would be `domain-example.com`. + * + * For more detailed information, see + * {@link http://goo.gl/6qBBPO| About Access Control Lists}. + * + * @constructor Acl + * @mixin + * @param {object} options Configuration options. + */ +class Acl extends AclRoleAccessorMethods { + constructor(options) { + super(); + this.pathPrefix = options.pathPrefix; + this.request_ = options.request; + } + /** + * @typedef {array} AddAclResponse + * @property {object} 0 The Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback AddAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Add access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be added. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link https://cloud.google.com/storage/docs/access-control Access + * Control}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {AddAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * // Here is how you would grant ownership permissions to a user on a + * specific + * // revision of a file. 
+ * //- + * myFile.acl.add({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.OWNER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_add_file_owner + * Example of adding an owner to a file: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + */ + add(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'POST', + uri: '', + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + json: { + entity: options.entity, + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * @typedef {array} RemoveAclResponse + * @property {object} 0 The full API response. + */ + /** + * @callback RemoveAclCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} + * + * @param {object} options Configuration object. + * @param {string} options.entity Whose permissions will be revoked. + * @param {int} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {RemoveAclCallback} callback The callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.delete({ + * entity: 'user-useremail@example.com' + * }, function(err, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.delete({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
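+     * //
+     * // The same call written with async/await (illustrative sketch only):
+     * //
+     * //   const [apiResponse] = await myFile.acl.delete({
+     * //     entity: 'user-useremail@example.com'
+     * //   });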
+ * //- + * myFile.acl.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_file_owner + * Example of removing an owner from a bucket: + */ + delete(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'DELETE', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + }, (err, resp) => { + callback(err, resp); + }); + } + /** + * @typedef {array} GetAclResponse + * @property {object|object[]} 0 Single or array of Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback GetAclCallback + * @param {?Error} err Request error, if any. + * @param {object|object[]} acl Single or array of Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Get access controls on a {@link Bucket} or {@link File}. If + * an entity is omitted, you will receive an array of all applicable access + * controls. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} + * + * @param {object|function} [options] Configuration options. If you want to + * receive a list of all access controls, pass the callback function as + * the only argument. + * @param {string} options.entity Whose permissions will be fetched. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * myBucket.acl.get({ + * entity: 'user-useremail@example.com' + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // Get all access controls. + * //- + * myBucket.acl.get(function(err, aclObjects, apiResponse) { + * // aclObjects = [ + * // { + * // entity: 'user-useremail@example.com', + * // role: 'owner' + * // } + * // ] + * }); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.get({ + * entity: 'user-useremail@example.com', + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
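+     * //
+     * // Or, as an async/await sketch that lists every entry (illustrative only):
+     * //
+     * //   const [aclObjects] = await myBucket.acl.get();
+     * //   aclObjects.forEach(({entity, role}) => console.log(entity, role));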
+ * //- + * myBucket.acl.get().then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_file_acl + * Example of printing a file's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_file_acl_for_user + * Example of printing a file's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + */ + get(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + let path = ''; + const query = {}; + if (options) { + path = '/' + encodeURIComponent(options.entity); + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + } + this.request({ + uri: path, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + let results; + if (resp.items) { + results = resp.items.map(this.makeAclObject_); + } + else { + results = this.makeAclObject_(resp); + } + callback(null, results, resp); + }); + } + /** + * @typedef {array} UpdateAclResponse + * @property {object} 0 The updated Acl Objects. + * @property {object} 1 The full API response. + */ + /** + * @callback UpdateAclCallback + * @param {?Error} err Request error, if any. + * @param {object} acl The updated Acl Objects. + * @param {object} apiResponse The full API response. + */ + /** + * Update access controls on a {@link Bucket} or {@link File}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} + * + * @param {object} options Configuration options. + * @param {string} options.entity Whose permissions will be updated. + * @param {string} options.role Permissions allowed for the defined entity. + * See {@link Storage.acl}. + * @param {number} [options.generation] **File Objects Only** Select a specific + * revision of this file (as opposed to the latest version, the default). + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {UpdateAclCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * const myFile = myBucket.file('my-file'); + * + * const options = { + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE + * }; + * + * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); + * + * //- + * // For file ACL operations, you can also specify a `generation` property. + * //- + * myFile.acl.update({ + * entity: 'user-useremail@example.com', + * role: gcs.acl.WRITER_ROLE, + * generation: 1 + * }, function(err, aclObject, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * myFile.acl.update(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + update(options, callback) { + const query = {}; + if (options.generation) { + query.generation = options.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + this.request({ + method: 'PUT', + uri: '/' + encodeURIComponent(options.entity), + qs: query, + json: { + role: options.role.toUpperCase(), + }, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + callback(null, this.makeAclObject_(resp), resp); + }); + } + /** + * Transform API responses to a consistent object format. + * + * @private + */ + makeAclObject_(accessControlObject) { + const obj = { + entity: accessControlObject.entity, + role: accessControlObject.role, + }; + if (accessControlObject.projectTeam) { + obj.projectTeam = accessControlObject.projectTeam; + } + return obj; + } + /** + * Patch requests up to the bucket's request object. + * + * @private + * + * @param {string} method Action. + * @param {string} path Request path. + * @param {*} query Request query object. + * @param {*} body Request body contents. + * @param {function} callback Callback function. + */ + request(reqOpts, callback) { + reqOpts.uri = this.pathPrefix + reqOpts.uri; + this.request_(reqOpts, callback); + } +} +exports.Acl = Acl; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Acl, { + exclude: ['request'], +}); + + +/***/ }), + +/***/ 2887: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const path = __importStar(__nccwpck_require__(6928)); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const util_1 = __nccwpck_require__(9023); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const util_js_1 = __nccwpck_require__(4557); +const acl_js_1 = __nccwpck_require__(6637); +const file_js_1 = __nccwpck_require__(9975); +const iam_js_1 = __nccwpck_require__(2880); +const notification_js_1 = __nccwpck_require__(8598); +const storage_js_1 = __nccwpck_require__(2848); +const signer_js_1 = __nccwpck_require__(7319); +const stream_1 = __nccwpck_require__(2203); +const url_1 = __nccwpck_require__(7016); +var BucketActionToHTTPMethod; +(function (BucketActionToHTTPMethod) { + BucketActionToHTTPMethod["list"] = "GET"; +})(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); +var AvailableServiceObjectMethods; +(function (AvailableServiceObjectMethods) { + AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; + AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; +})(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); +var BucketExceptionMessages; +(function (BucketExceptionMessages) { + BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; + BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; + BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; + BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; + BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; + BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; + BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; + BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; +})(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); +/** + * @callback Crc32cGeneratorToStringCallback + * 
A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ +/** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ +/** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ +/** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ +/** + * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + * + * @name Bucket#crc32cGenerator + * @type {CRC32CValidator} + */ +/** + * Get and set IAM policies for your bucket. + * + * @name Bucket#iam + * @mixes Iam + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Get the IAM policy for your bucket. + * //- + * bucket.iam.getPolicy(function(err, policy) { + * console.log(policy); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.getPolicy().then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). 
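+ *
+ * For example, a single entry might look like
+ * `{ entity: 'user-liz@example.com', role: 'OWNER' }` (illustrative).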
+ * + * The `acl` object on a Bucket instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * Buckets also have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. Default ACLs specify permissions that all new + * objects added to the bucket will inherit by default. You can add, delete, + * get, and update entities and permissions for these as well with + * {@link Bucket#acl.default}. + * + * See {@link http://goo.gl/6qBBPO| About Access Control Lists} + * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} + * + * @name Bucket#acl + * @mixes Acl + * @property {Acl} default Cloud Storage Buckets have + * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} + * for all created files. You can add, delete, get, and update entities and + * permissions for these as well. The method signatures and examples are all + * the same, after only prefixing the method call with `default`. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Make a bucket's contents publicly readable. + * //- + * const myBucket = storage.bucket('my-bucket'); + * + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * myBucket.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl + * Example of printing a bucket's ACL: + * + * @example include:samples/acl.js + * region_tag:storage_print_bucket_acl_for_user + * Example of printing a bucket's ACL for a specific user: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_owner + * Example of adding an owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_owner + * Example of removing an owner from a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_add_bucket_default_owner + * Example of adding a default owner to a bucket: + * + * @example include:samples/acl.js + * region_tag:storage_remove_bucket_default_owner + * Example of removing a default owner from a bucket: + */ +/** + * The API-formatted resource description of the bucket. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Bucket#metadata + * @type {object} + */ +/** + * The bucket's name. + * @name Bucket#name + * @type {string} + */ +/** + * Get {@link File} objects for the files currently in the bucket as a + * readable object stream. + * + * @method Bucket#getFilesStream + * @param {GetFilesOptions} [query] Query object for listing files. + * @returns {ReadableStream} A readable stream that emits {@link File} instances. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFilesStream() + * .on('error', console.error) + * .on('data', function(file) { + * // file is a File object. + * }) + * .on('end', function() { + * // All files retrieved. 
+ * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * bucket.getFilesStream() + * .on('data', function(file) { + * this.end(); + * }); + * + * //- + * // If you're filtering files with a delimiter, you should use + * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to + * // preserve the `apiResponse` argument. + * //- + * const prefixes = []; + * + * function callback(err, files, nextQuery, apiResponse) { + * prefixes = prefixes.concat(apiResponse.prefixes); + * + * if (nextQuery) { + * bucket.getFiles(nextQuery, callback); + * } else { + * // prefixes = The finished array of prefixes. + * } + * } + * + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, callback); + * ``` + */ +/** + * Create a Bucket object to interact with a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Storage} storage A {@link Storage} instance. + * @param {string} name The name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * ``` + */ +class Bucket extends index_js_1.ServiceObject { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + getFilesStream(query) { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + constructor(storage, name, options) { + var _a, _b, _c, _d; + options = options || {}; + // Allow for "gs://"-style input, and strip any trailing slashes. + name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); + const requestQueryObject = {}; + if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + requestQueryObject.ifGenerationMatch = + options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + requestQueryObject.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + requestQueryObject.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + requestQueryObject.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + const userProject = options.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * Create a bucket. + * + * @method Bucket#create + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. + * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.create(function(err, bucket, apiResponse) { + * if (!err) { + * // The bucket was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.create().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * IamDeleteBucketOptions Configuration options. + * @property {boolean} [ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} DeleteBucketResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteBucketCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} + * + * @method Bucket#delete + * @param {DeleteBucketOptions} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the bucket does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * bucket.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_delete_bucket + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} BucketExistsResponse + * @property {boolean} 0 Whether the {@link Bucket} exists. + */ + /** + * @callback BucketExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link Bucket} exists. + */ + /** + * Check if the bucket exists. + * + * @method Bucket#exists + * @param {BucketExistsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {BucketExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() + * @property {boolean} [autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetBucketResponse + * @property {Bucket} 0 The {@link Bucket}. + * @property {object} 1 The full API response. 
+ */ + /** + * @callback GetBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a bucket if it exists. + * + * You may optionally use this to "get or create" an object by providing + * an object with `autoCreate` set to `true`. Any extra configuration that + * is normally required for the `create` method must be contained within + * this object as well. + * + * @method Bucket#get + * @param {GetBucketOptions} [options] Configuration options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.get(function(err, bucket, apiResponse) { + * // `bucket.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.get().then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetBucketMetadataResponse + * @property {object} 0 The bucket metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Get the bucket's metadata. + * + * To set metadata, see {@link Bucket#setMetadata}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} + * + * @method Bucket#getMetadata + * @param {GetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_get_requester_pays_status + * Example of retrieving the requester pays status of a bucket: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} SetBucketMetadataResponse + * @property {object} apiResponse The full API response. 
+ */ + /** + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * Set the bucket's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @method Bucket#setMetadata + * @param {object} metadata The metadata you wish to set. + * @param {SetBucketMetadataOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Set website metadata field on the bucket. + * //- + * const metadata = { + * website: { + * mainPageSuffix: 'http://example.com', + * notFoundPage: 'http://example.com/404.html' + * } + * }; + * + * bucket.setMetadata(metadata, function(err, apiResponse) {}); + * + * //- + * // Enable versioning for your bucket. + * //- + * bucket.setMetadata({ + * versioning: { + * enabled: true + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Enable KMS encryption for objects within this bucket. + * //- + * bucket.setMetadata({ + * encryption: { + * defaultKmsKeyName: 'projects/grape-spaceship-123/...' + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Set the default event-based hold value for new objects in this + * // bucket. + * //- + * bucket.setMetadata({ + * defaultEventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Remove object lifecycle rules. + * //- + * bucket.setMetadata({ + * lifecycle: null + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: storage, + baseUrl: '/b', + id: name, + createMethod: storage.createBucket.bind(storage), + methods, + }); + this.name = name; + this.storage = storage; + this.userProject = options.userProject; + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.acl.default = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/defaultObjectAcl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.storage.crc32cGenerator; + this.iam = new iam_js_1.Iam(this); + this.getFilesStream = paginator_1.paginator.streamify('getFiles'); + this.instanceRetryValue = storage.retryOptions.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The bucket's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * // `gs://my-bucket` + * const href = bucket.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = new url_1.URL('gs://'); + uri.host = this.name; + return uri; + } + /** + * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). + * @property {boolean} [append=true] The new rules will be appended to any + * pre-existing rules. 
+ */ + /** + * + * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects + * in this bucket. + * @property {string|object} action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @property {object} condition Condition a bucket must meet before the + * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @property {string} [storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. Please see + * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. + */ + /** + * Add an object lifecycle management rule to the bucket. + * + * By default, an Object Lifecycle Management rule provided to this method + * will be included to the existing policy. To replace all existing rules, + * supply the `options` argument, setting `append` to `false`. + * + * To add multiple rules, pass a list to the `rule` parameter. Calling this + * function multiple times asynchronously does not guarantee that all rules + * are added correctly. + * + * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects + * in this bucket. + * @param {string|object} rule.action The action to be taken upon matching of + * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. + * **Note**: For configuring a raw-formatted rule object to be passed as `action` + * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. + * @param {object} rule.condition Condition a bucket must meet before the + * action occurson the bucket. Refer to followitn supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. + * @param {string} [rule.storageClass] When using the `setStorageClass` + * action, provide this option to dictate which storage class the object + * should update to. + * @param {AddLifecycleRuleOptions} [options] Configuration object. + * @param {boolean} [options.append=true] Append the new rule to the existing + * policy. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Automatically have an object deleted from this bucket once it is 3 years + * // of age. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * const lifecycleRules = bucket.metadata.lifecycle.rule; + * + * // Iterate over the Object Lifecycle Management rules on this bucket. 
+ * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // By default, the rule you provide will be added to the existing policy. + * // Optionally, you can disable this behavior to replace all of the + * // pre-existing rules. + * //- + * const options = { + * append: false + * }; + * + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * age: 365 * 3 // Specified in days. + * } + * }, options, function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // All rules have been replaced with the new "delete" rule. + * + * // Iterate over the Object Lifecycle Management rules on this bucket. + * lifecycleRules.forEach(lifecycleRule => {}); + * }); + * + * //- + * // For objects created before 2018, "downgrade" the storage class. + * //- + * bucket.addLifecycleRule({ + * action: 'setStorageClass', + * storageClass: 'COLDLINE', + * condition: { + * createdBefore: new Date('2018') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete objects created before 2016 which have the Coldline storage + * // class. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * matchesStorageClass: [ + * 'COLDLINE' + * ], + * createdBefore: new Date('2016') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceNoncurrentTime: 100 + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a noncurrent timestamp before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * noncurrentTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * + * //- + * // Delete object that has a customTime that is at least 100 days. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * daysSinceCustomTime: 100 + * } + * }, function(err, apiResponse) ()); + * + * //- + * // Delete object that has a customTime before 2020-01-01. + * //- + * bucket.addLifecycleRule({ + * action: 'delete', + * condition: { + * customTimeBefore: new Date('2020-01-01') + * } + * }, function(err, apiResponse) {}); + * ``` + */ + addLifecycleRule(rule, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + options = options || {}; + const rules = Array.isArray(rule) ? rule : [rule]; + for (const curRule of rules) { + if (curRule.condition.createdBefore instanceof Date) { + curRule.condition.createdBefore = curRule.condition.createdBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.customTimeBefore instanceof Date) { + curRule.condition.customTimeBefore = curRule.condition.customTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + if (curRule.condition.noncurrentTimeBefore instanceof Date) { + curRule.condition.noncurrentTimeBefore = + curRule.condition.noncurrentTimeBefore + .toISOString() + .replace(/T.+$/, ''); + } + } + if (options.append === false) { + this.setMetadata({ lifecycle: { rule: rules } }, options, callback); + return; + } + // The default behavior appends the previously-defined lifecycle rules with + // the new ones just passed in by the user. 
+ this.getMetadata((err, metadata) => { + var _a, _b; + if (err) { + callback(err); + return; + } + const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) + ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule + : []; + this.setMetadata({ + lifecycle: { rule: currentLifecycleRules.concat(rules) }, + }, options, callback); + }); + } + /** + * @typedef {object} CombineOptions + * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CombineCallback + * @param {?Error} err Request error, if any. + * @param {File} newFile The new {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CombineResponse + * @property {File} 0 The new {@link File}. + * @property {object} 1 The full API response. + */ + /** + * Combine multiple files into one new file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} + * + * @throws {Error} if a non-array is provided as sources argument. + * @throws {Error} if no sources are provided. + * @throws {Error} if no destination is provided. + * + * @param {string[]|File[]} sources The source files that will be + * combined. + * @param {string|File} destination The file you would like the + * source files combined into. + * @param {CombineOptions} [options] Configuration options. + * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of + * the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {CombineCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const logBucket = storage.bucket('log-bucket'); + * + * const sources = [ + * logBucket.file('2013-logs.txt'), + * logBucket.file('2014-logs.txt') + * ]; + * + * const allLogs = logBucket.file('all-logs.txt'); + * + * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { + * // newFile === allLogs + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * logBucket.combine(sources, allLogs).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + combine(sources, destination, optionsOrCallback, callback) { + var _a; + if (!Array.isArray(sources) || sources.length === 0) { + throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); + } + if (!destination) { + throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required + AvailableServiceObjectMethods.setMetadata, // Same as above + options); + const convertToFile = (file) => { + if (file instanceof file_js_1.File) { + return file; + } + return this.file(file); + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + sources = sources.map(convertToFile); + const destinationFile = convertToFile(destination); + callback = callback || index_js_1.util.noop; + if (!destinationFile.metadata.contentType) { + const destinationContentType = mime_1.default.getType(destinationFile.name) || undefined; + if (destinationContentType) { + destinationFile.metadata.contentType = destinationContentType; + } + } + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === + undefined && + options.ifGenerationMatch === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + if (options.ifGenerationMatch === undefined) { + Object.assign(options, destinationFile.instancePreconditionOpts, options); + } + // Make the request from the destination File object. + destinationFile.request({ + method: 'POST', + uri: '/compose', + maxRetries, + json: { + destination: { + contentType: destinationFile.metadata.contentType, + contentEncoding: destinationFile.metadata.contentEncoding, + }, + sourceObjects: sources.map(source => { + const sourceObject = { + name: source.name, + }; + if (source.metadata && source.metadata.generation) { + sourceObject.generation = parseInt(source.metadata.generation.toString()); + } + return sourceObject; + }), + }, + qs: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, destinationFile, resp); + }); + } + /** + * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. + * + * @typedef {object} CreateChannelConfig + * @property {string} address The address where notifications are + * delivered for this channel. + * @property {string} [delimiter] Returns results in a directory-like mode. + * @property {number} [maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. 
+ * @property {string} [projection=noAcl] Set of properties to return. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions=false] If `true`, lists all versions of an object + * as distinct results. + */ + /** + * @typedef {object} CreateChannelOptions + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} CreateChannelResponse + * @property {Channel} 0 The new {@link Channel}. + * @property {object} 1 The full API response. + */ + /** + * @callback CreateChannelCallback + * @param {?Error} err Request error, if any. + * @param {Channel} channel The new {@link Channel}. + * @param {object} apiResponse The full API response. + */ + /** + * Create a channel that will be notified when objects in this bucket changes. + * + * @throws {Error} If an ID is not provided. + * @throws {Error} If an address is not provided. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} + * + * @param {string} id The ID of the channel to create. + * @param {CreateChannelConfig} config Configuration for creating channel. + * @param {string} config.address The address where notifications are + * delivered for this channel. + * @param {string} [config.delimiter] Returns results in a directory-like mode. + * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` + * to return in a single page of responses. + * @param {string} [config.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {string} [config.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {string} [config.projection=noAcl] Set of properties to return. + * @param {string} [config.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [config.versions=false] If `true`, lists all versions of an object + * as distinct results. + * @param {CreateChannelOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateChannelCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const id = 'new-channel-id'; + * + * const config = { + * address: 'https://...' + * }; + * + * bucket.createChannel(id, config, function(err, channel, apiResponse) { + * if (!err) { + * // Channel created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.createChannel(id, config).then(function(data) { + * const channel = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + createChannel(id, config, optionsOrCallback, callback) { + if (typeof id !== 'string') { + throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + method: 'POST', + uri: '/o/watch', + json: Object.assign({ + id, + type: 'web_hook', + }, config), + qs: options, + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const resourceId = apiResponse.resourceId; + const channel = this.storage.channel(id, resourceId); + channel.metadata = apiResponse; + callback(null, channel, apiResponse); + }); + } + /** + * Metadata to set for the Notification. + * + * @typedef {object} CreateNotificationOptions + * @property {object} [customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @property {string[]} [eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @property {string} [objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @property {string} [payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback CreateNotificationCallback + * @param {?Error} err Request error, if any. + * @param {Notification} notification The new {@link Notification}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} CreateNotificationResponse + * @property {Notification} 0 The new {@link Notification}. + * @property {object} 1 The full API response. + */ + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. + * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationOptions} [options] Metadata to set for the + * notification. + * @param {object} [options.customAttributes] An optional list of additional + * attributes to attach to each Cloud PubSub message published for this + * notification subscription. + * @param {string[]} [options.eventTypes] If present, only send notifications about + * listed event types. If empty, sent notifications for all event types. + * @param {string} [options.objectNamePrefix] If present, only apply this + * notification configuration to object names that begin with this prefix. + * @param {string} [options.payloadFormat] The desired content of the Payload. + * Defaults to `JSON_API_V1`. + * + * Acceptable values are: + * - `JSON_API_V1` + * + * - `NONE` + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {CreateNotificationCallback} [callback] Callback function. 
+ * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * @see Notification#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const callback = function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }; + * + * myBucket.createNotification('my-topic', callback); + * + * //- + * // Configure the nofiication by providing Notification metadata. + * //- + * const metadata = { + * objectNamePrefix: 'prefix-' + * }; + * + * myBucket.createNotification('my-topic', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * myBucket.createNotification('my-topic').then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/createNotification.js + * region_tag:storage_create_bucket_notifications + * Another example: + */ + createNotification(topic, optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const topicIsObject = topic !== null && typeof topic === 'object'; + if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + topic = topic.name; + } + if (typeof topic !== 'string') { + throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); + } + const body = Object.assign({ topic }, options); + if (body.topic.indexOf('projects') !== 0) { + body.topic = 'projects/{{projectId}}/topics/' + body.topic; + } + body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; + if (!body.payloadFormat) { + body.payloadFormat = 'JSON_API_V1'; + } + const query = {}; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + this.request({ + method: 'POST', + uri: '/notificationConfigs', + json: (0, util_js_1.convertObjKeysToSnakeCase)(body), + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + const notification = this.notification(apiResponse.id); + notification.metadata = apiResponse; + callback(null, notification, apiResponse); + }); + } + /** + * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} + * for all of the supported properties. + * @property {boolean} [force] Suppress errors until all files have been + * processed. + */ + /** + * @callback DeleteFilesCallback + * @param {?Error|?Error[]} err Request error, if any, or array of errors from + * files that were not able to be deleted. + * @param {object} [apiResponse] The full API response. + */ + /** + * Iterate over the bucket's files, calling `file.delete()` on each. + * + * This is not an atomic request. A delete attempt will be + * made for each file individually. Any one can fail, in which case only a + * portion of the files you intended to be deleted would have. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors until all files have had a chance + * to be processed. 
+ * + * File preconditions cannot be passed to this function. It will not retry unless + * the idempotency strategy is set to retry always. + * + * The `query` object passed as the first argument will also be passed to + * {@link Bucket#getFiles}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} + * @param {boolean} [query.force] Suppress errors until all files have been + * processed. + * @param {DeleteFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the files in the bucket. + * //- + * bucket.deleteFiles(function(err) {}); + * + * //- + * // By default, if a file cannot be deleted, this method will stop deleting + * // files from your bucket. You can override this setting with `force: + * // true`. + * //- + * bucket.deleteFiles({ + * force: true + * }, function(errors) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * }); + * + * //- + * // The first argument to this method acts as a query to + * // {@link Bucket#getFiles}. As an example, you can delete files + * // which match a prefix. + * //- + * bucket.deleteFiles({ + * prefix: 'images/' + * }, function(err) { + * if (!err) { + * // All files in the `images` directory have been deleted. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteFiles().then(function() {}); + * ``` + */ + deleteFiles(queryOrCallback, callback) { + let query = {}; + if (typeof queryOrCallback === 'function') { + callback = queryOrCallback; + } + else if (queryOrCallback) { + query = queryOrCallback; + } + const MAX_PARALLEL_LIMIT = 10; + const MAX_QUEUE_SIZE = 1000; + const errors = []; + const deleteFile = (file) => { + return file.delete(query).catch(err => { + if (!query.force) { + throw err; + } + errors.push(err); + }); + }; + (async () => { + try { + let promises = []; + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const filesStream = this.getFilesStream(query); + for await (const curFile of filesStream) { + if (promises.length >= MAX_QUEUE_SIZE) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => deleteFile(curFile)).catch(e => { + filesStream.destroy(); + throw e; + })); + } + await Promise.all(promises); + callback(errors.length > 0 ? errors : null); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @deprecated + * @typedef {array} DeleteLabelsResponse + * @property {object} 0 The full API response. + */ + /** + * @deprecated + * @callback DeleteLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata Bucket's metadata. + */ + /** + * @deprecated Use setMetadata directly + * Delete one or more labels from this bucket. + * + * @param {string|string[]} [labels] The labels to delete. If no labels are + * provided, all of the labels are removed. + * @param {DeleteLabelsCallback} [callback] Callback function. 
+ * @param {DeleteLabelsOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Delete all of the labels from this bucket. + * //- + * bucket.deleteLabels(function(err, apiResponse) {}); + * + * //- + * // Delete a single label. + * //- + * bucket.deleteLabels('labelone', function(err, apiResponse) {}); + * + * //- + * // Delete a specific set of labels. + * //- + * bucket.deleteLabels([ + * 'labelone', + * 'labeltwo' + * ], function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.deleteLabels().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { + let labels = new Array(); + let options = {}; + if (typeof labelsOrCallbackOrOptions === 'function') { + callback = labelsOrCallbackOrOptions; + } + else if (typeof labelsOrCallbackOrOptions === 'string') { + labels = [labelsOrCallbackOrOptions]; + } + else if (Array.isArray(labelsOrCallbackOrOptions)) { + labels = labelsOrCallbackOrOptions; + } + else if (labelsOrCallbackOrOptions) { + options = labelsOrCallbackOrOptions; + } + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + const deleteLabels = (labels) => { + const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { + nullLabelMap[labelKey] = null; + return nullLabelMap; + }, {}); + if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { + this.setLabels(nullLabelMap, options, callback); + } + else { + this.setLabels(nullLabelMap, callback); + } + }; + if (labels.length === 0) { + this.getLabels((err, labels) => { + if (err) { + callback(err); + return; + } + deleteLabels(Object.keys(labels)); + }); + } + else { + deleteLabels(labels); + } + } + /** + * @typedef {array} DisableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DisableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ *
+ * + * Disable `requesterPays` functionality from this bucket. + * + * @param {DisableRequesterPaysCallback} [callback] Callback function. + * @param {DisableRequesterPaysOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.disableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality disabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.disableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_disable_requester_pays + * Example of disabling requester pays: + */ + disableRequesterPays(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: false, + }, + }, options, callback); + } + /** + * Configuration object for enabling logging. + * + * @typedef {object} EnableLoggingOptions + * @property {string|Bucket} [bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @property {string} prefix A unique prefix for log object names. + */ + /** + * Enable logging functionality for this bucket. This will make two API + * requests, first to grant Cloud Storage WRITE permission to the bucket, then + * to set the appropriate configuration on the Bucket's metadata. + * + * @param {EnableLoggingOptions} config Configuration options. + * @param {string|Bucket} [config.bucket] The bucket for the log entries. By + * default, the current bucket is used. + * @param {string} config.prefix A unique prefix for log object names. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const config = { + * prefix: 'log' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) { + * if (!err) { + * // Logging functionality enabled successfully. + * } + * }); + * + * ``` + * @example + * Optionally, provide a destination bucket. + * ``` + * const config = { + * prefix: 'log', + * bucket: 'destination-bucket' + * }; + * + * bucket.enableLogging(config, function(err, apiResponse) {}); + * ``` + * + * @example + * If the callback is omitted, we'll return a Promise. + * ``` + * bucket.enableLogging(config).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + enableLogging(config, callback) { + if (!config || + typeof config === 'function' || + typeof config.prefix === 'undefined') { + throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); + } + let logBucket = this.id; + if (config.bucket && config.bucket instanceof Bucket) { + logBucket = config.bucket.id; + } + else if (config.bucket && typeof config.bucket === 'string') { + logBucket = config.bucket; + } + const options = {}; + if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { + options.ifMetagenerationMatch = config.ifMetagenerationMatch; + } + if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { + options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; + } + (async () => { + try { + const [policy] = await this.iam.getPolicy(); + policy.bindings.push({ + members: ['group:cloud-storage-analytics@google.com'], + role: 'roles/storage.objectCreator', + }); + await this.iam.setPolicy(policy); + this.setMetadata({ + logging: { + logBucket, + logObjectPrefix: config.prefix, + }, + }, options, callback); + } + catch (e) { + callback(e); + return; + } + })(); + } + /** + * @typedef {array} EnableRequesterPaysResponse + * @property {object} 0 The full API response. + */ + /** + * @callback EnableRequesterPaysCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + *
+ * Early Access Testers Only
+ *
+ * This feature is not yet widely-available.
+ *
+ * + * Enable `requesterPays` functionality for this bucket. This enables you, the + * bucket owner, to have the requesting user assume the charges for the access + * to your bucket and its contents. + * + * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] + * Callback function or precondition options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.enableRequesterPays(function(err, apiResponse) { + * if (!err) { + * // requesterPays functionality enabled successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.enableRequesterPays().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/requesterPays.js + * region_tag:storage_enable_requester_pays + * Example of enabling requester pays: + */ + enableRequesterPays(optionsOrCallback, cb) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.setMetadata({ + billing: { + requesterPays: true, + }, + }, options, cb); + } + /** + * Create a {@link File} object. See {@link File} to see how to handle + * the different use cases you may have. + * + * @param {string} name The name of the file in this bucket. + * @param {FileOptions} [options] Configuration options. + * @param {string|number} [options.generation] Only use a specific revision of + * this file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * KMS key ring must use the same location as the bucket. + * @param {string} [options.userProject] The ID of the project which will be + * billed for all requests made from File object. + * @returns {File} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-existing-file.png'); + * ``` + */ + file(name, options) { + if (!name) { + throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); + } + return new file_js_1.File(this, name, options); + } + /** + * @typedef {array} GetFilesResponse + * @property {File[]} 0 Array of {@link File} instances. + * @param {object} nextQuery 1 A query object to receive more results. + * @param {object} apiResponse 2 The full API response. + */ + /** + * @callback GetFilesCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Array of {@link File} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Query object for listing files. + * + * @typedef {object} GetFilesOptions + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {string} [delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. 
Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @property {string} [endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. + * See: https://cloud.google.com/storage/docs/managed-folders + * @property {boolean} [includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @property {string} [prefix] Filter results to objects whose names begin + * with this prefix. + * @property {string} [matchGlob] A glob pattern used to filter results, + * for example foo*bar + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @property {string} [startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {boolean} [versions] If true, returns File objects scoped to + * their versions. + */ + /** + * Get {@link File} objects for the files currently in the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} + * + * @param {GetFilesOptions} [query] Query object for listing files. + * @param {boolean} [query.autoPaginate=true] Have pagination handled + * automatically. + * @param {string} [query.delimiter] Results will contain only objects whose + * names, aside from the prefix, do not contain delimiter. Objects whose + * names, aside from the prefix, contain delimiter will have their name + * truncated after the delimiter, returned in `apiResponse.prefixes`. + * Duplicate prefixes are omitted. + * @param {string} [query.endOffset] Filter results to objects whose names are + * lexicographically before endOffset. If startOffset is also set, the objects + * listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and + * managed folders in the set of prefixes returned by the query. Only applicable if + * delimiter is set to / and autoPaginate is set to false. 
+ * See: https://cloud.google.com/storage/docs/managed-folders + * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in + * exactly one instance of delimiter have their metadata included in items[] + * in addition to the relevant part of the object name appearing in prefixes[]. + * @param {string} [query.prefix] Filter results to objects whose names begin + * with this prefix. + * @param {number} [query.maxApiCalls] Maximum number of API calls to make. + * @param {number} [query.maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @param {string} [query.pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be + * listed as distinct results in order of generation number. Note `soft_deleted` and + * `versions` cannot be set to true simultaneously. + * @param {string} [query.startOffset] Filter results to objects whose names are + * lexicographically equal to or after startOffset. If endOffset is also set, + * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). + * @param {string} [query.userProject] The ID of the project which will be + * billed for the request. + * @param {boolean} [query.versions] If true, returns File objects scoped to + * their versions. + * @param {GetFilesCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getFiles(function(err, files) { + * if (!err) { + * // files is an array of File objects. + * } + * }); + * + * //- + * // If your bucket has versioning enabled, you can get all of your files + * // scoped to their generation. + * //- + * bucket.getFiles({ + * versions: true + * }, function(err, files) { + * // Each file is scoped to its generation. + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, files, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * bucket.getFiles(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * files[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. + * files[0].getMetadata(function(err, metadata) {}); + * }; + * + * bucket.getFiles({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getFiles().then(function(data) { + * const files = data[0]; + * }); + * + * ``` + * @example + *
+ * Simulating a File System
+ *
+ * With `autoPaginate: false`, it's possible to iterate over files which
+ * incorporate a common structure using a delimiter.
+ *
+ * Consider the following remote objects:
+ *
+ *   1. "a"
+ *   2. "a/b/c/d"
+ *   3. "b/d/e"
+ *
+ * Using a delimiter of `/` will return a single file, "a".
+ *
+ * `apiResponse.prefixes` will return the "sub-directories" that were found:
+ *
+ *   1. "a/"
+ *   2. "b/"
+ * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/' + * }, function(err, files, nextQuery, apiResponse) { + * // files = [ + * // {File} // File object for file "a" + * // ] + * + * // apiResponse.prefixes = [ + * // 'a/', + * // 'b/' + * // ] + * }); + * ``` + * + * @example + * Using prefixes, it's now possible to simulate a file system with follow-up requests. + * ``` + * bucket.getFiles({ + * autoPaginate: false, + * delimiter: '/', + * prefix: 'a/' + * }, function(err, files, nextQuery, apiResponse) { + * // No files found within "directory" a. + * // files = [] + * + * // However, a "sub-directory" was found. + * // This prefix can be used to continue traversing the "file system". + * // apiResponse.prefixes = [ + * // 'a/b/' + * // ] + * }); + * ``` + * + * @example include:samples/files.js + * region_tag:storage_list_files + * Another example: + * + * @example include:samples/files.js + * region_tag:storage_list_files_with_prefix + * Example of listing files, filtered by a prefix: + */ + getFiles(queryOrCallback, callback) { + let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; + if (!callback) { + callback = queryOrCallback; + } + query = Object.assign({}, query); + if (query.fields && + query.autoPaginate && + !query.fields.includes('nextPageToken')) { + query.fields = `${query.fields},nextPageToken`; + } + this.request({ + uri: '/o', + qs: query, + }, (err, resp) => { + if (err) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const files = itemsArray.map((file) => { + const options = {}; + if (query.fields) { + const fileInstance = file; + return fileInstance; + } + if (query.versions) { + options.generation = file.generation; + } + if (file.kmsKeyName) { + options.kmsKeyName = file.kmsKeyName; + } + const fileInstance = this.file(file.name, options); + fileInstance.metadata = file; + return fileInstance; + }); + let nextQuery = null; + if (resp.nextPageToken) { + nextQuery = Object.assign({}, query, { + pageToken: resp.nextPageToken, + }); + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + callback(null, files, nextQuery, resp); + }); + } + /** + * @deprecated + * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated + * @typedef {array} GetLabelsResponse + * @property {object} 0 Object of labels currently set on this bucket. + */ + /** + * @deprecated + * @callback GetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} labels Object of labels currently set on this bucket. + */ + /** + * @deprecated Use getMetadata directly. + * Get the labels currently set on this bucket. + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.getLabels(function(err, labels) { + * if (err) { + * // Error handling omitted. + * } + * + * // labels = { + * // label: 'labelValue', + * // ... + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.getLabels().then(function(data) { + * const labels = data[0]; + * }); + * ``` + */ + getLabels(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.getMetadata(options, (err, metadata) => { + if (err) { + callback(err, null); + return; + } + callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); + }); + } + /** + * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback GetNotificationsCallback + * @param {?Error} err Request error, if any. + * @param {Notification[]} notifications Array of {@link Notification} + * instances. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetNotificationsResponse + * @property {Notification[]} 0 Array of {@link Notification} instances. + * @property {object} 1 The full API response. + */ + /** + * Retrieves a list of notification subscriptions for a given bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} + * + * @param {GetNotificationsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.getNotifications(function(err, notifications, apiResponse) { + * if (!err) { + * // notifications is an array of Notification objects. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getNotifications().then(function(data) { + * const notifications = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/listNotifications.js + * region_tag:storage_list_bucket_notifications + * Another example: + */ + getNotifications(optionsOrCallback, callback) { + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = optionsOrCallback; + } + this.request({ + uri: '/notificationConfigs', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const notifications = itemsArray.map((notification) => { + const notificationInstance = this.notification(notification.id); + notificationInstance.metadata = notification; + return notificationInstance; + }); + callback(null, notifications, resp); + }); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * @typedef {object} GetBucketSignedUrlConfig + * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. + * @property {*} expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. 
+ * @property {string} [version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URLs. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @property {string} [cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @property {object} [extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [queryParams] Additional query parameters to include + * in the signed URL. + */ + /** + * Get a signed URL to allow limited time access to a bucket. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {GetBucketSignedUrlConfig} config Configuration object. + * @param {string} config.action Currently only supports "list" (HTTP: GET). + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URLs. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". 
+ * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @property {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * //- + * // Generate a URL that allows temporary access to list files in a bucket. + * //- + * const request = require('request'); + * + * const config = { + * action: 'list', + * expires: '03-17-2025' + * }; + * + * bucket.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The bucket is now available to be listed from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * ``` + */ + getSignedUrl(cfg, callback) { + const method = BucketActionToHTTPMethod[cfg.action]; + const signConfig = { + method, + expires: cfg.expires, + version: cfg.version, + cname: cfg.cname, + extensionHeaders: cfg.extensionHeaders || {}, + queryParams: cfg.queryParams || {}, + host: cfg.host, + signingEndpoint: cfg.signingEndpoint, + }; + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this, undefined, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback BucketLockCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Lock a previously-defined retention policy. This will prevent changes to + * the policy. + * + * @throws {Error} if a metageneration is not provided. + * + * @param {number|string} metageneration The bucket's metageneration. This is + * accesssible from calling {@link File#getMetadata}. + * @param {BucketLockCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const metageneration = 2; + * + * bucket.lock(metageneration, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.lock(metageneration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + lock(metageneration, callback) { + const metatype = typeof metageneration; + if (metatype !== 'number' && metatype !== 'string') { + throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); + } + this.request({ + method: 'POST', + uri: '/lockRetentionPolicy', + qs: { + ifMetagenerationMatch: metageneration, + }, + }, callback); + } + /** + * @typedef {object} RestoreOptions Options for Bucket#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/restore#resource| Object resource}. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + */ + /** + * Restores a soft-deleted bucket + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [bucket] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return bucket; + } + /** + * @typedef {array} MakeBucketPrivateResponse + * @property {File[]} 0 List of files made private. + */ + /** + * @callback MakeBucketPrivateCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made private. + */ + /** + * @typedef {object} MakeBucketPrivateOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * private. + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [force] Queue errors occurred while making files + * private until all files have been processed. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Make the bucket listing private. + * + * You may also choose to make the contents of the bucket private by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePrivate} for every file in the bucket. + * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPrivateOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * private. + * @param {Metadata} [options.metadata] Define custom metadata properties to define + * along with the operation. + * @param {boolean} [options.force] Queue errors occurred while making files + * private until all files have been processed. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {MakeBucketPrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket private. 
+ * //- + * bucket.makePrivate(function(err) {}); + * + * //- + * // Make the bucket and its contents private. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePrivate(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // Make the bucket and its contents private, using force to suppress errors + * // until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePrivate(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made private in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePrivate(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b, _c, _d; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options.private = true; + const query = { + predefinedAcl: 'projectPrivate', + }; + if (options.userProject) { + query.userProject = options.userProject; + } + if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { + query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; + } + if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { + query.ifGenerationNotMatch = + options.preconditionOpts.ifGenerationNotMatch; + } + if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { + query.ifMetagenerationMatch = + options.preconditionOpts.ifMetagenerationMatch; + } + if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { + query.ifMetagenerationNotMatch = + options.preconditionOpts.ifMetagenerationNotMatch; + } + // You aren't allowed to set both predefinedAcl & acl properties on a bucket + // so acl must explicitly be nullified. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, (err) => { + if (err) { + callback(err); + } + const internalCall = () => { + if (options.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); + } + return Promise.resolve([]); + }; + internalCall() + .then(files => callback(null, files)) + .catch(callback); + }); + } + /** + * @typedef {object} MakeBucketPublicOptions + * @property {boolean} [includeFiles=false] Make each file in the bucket + * public. + * @property {boolean} [force] Queue errors occurred while making files + * public until all files have been processed. + */ + /** + * @callback MakeBucketPublicCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files List of files made public. + */ + /** + * @typedef {array} MakeBucketPublicResponse + * @property {File[]} 0 List of files made public. + */ + /** + * Make the bucket publicly readable. + * + * You may also choose to make the contents of the bucket publicly readable by + * specifying `includeFiles: true`. This will automatically run + * {@link File#makePublic} for every file in the bucket. 
+ * + * When specifying `includeFiles: true`, use `force: true` to delay execution + * of your callback until all files have been processed. By default, the + * callback is executed after the first error. Use `force` to queue such + * errors until all files have been processed, after which they will be + * returned as an array as the first argument to your callback. + * + * NOTE: This may cause the process to be long-running and use a high number + * of requests. Use with caution. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} + * + * @param {MakeBucketPublicOptions} [options] Configuration options. + * @param {boolean} [options.includeFiles=false] Make each file in the bucket + * public. + * @param {boolean} [options.force] Queue errors occurred while making files + * public until all files have been processed. + * @param {MakeBucketPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Make the bucket publicly readable. + * //- + * bucket.makePublic(function(err) {}); + * + * //- + * // Make the bucket and its contents publicly readable. + * //- + * const opts = { + * includeFiles: true + * }; + * + * bucket.makePublic(opts, function(err, files) { + * // `err`: + * // The first error to occur, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // Make the bucket and its contents publicly readable, using force to + * // suppress errors until all files have been processed. + * //- + * const opts = { + * includeFiles: true, + * force: true + * }; + * + * bucket.makePublic(opts, function(errors, files) { + * // `errors`: + * // Array of errors if any occurred, otherwise null. + * // + * // `files`: + * // Array of files successfully made public in the bucket. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.makePublic(opts).then(function(data) { + * const files = data[0]; + * }); + * ``` + */ + makePublic(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const req = { public: true, ...options }; + this.acl + .add({ + entity: 'allUsers', + role: 'READER', + }) + .then(() => { + return this.acl.default.add({ + entity: 'allUsers', + role: 'READER', + }); + }) + .then(() => { + if (req.includeFiles) { + return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); + } + return []; + }) + .then(files => callback(null, files), callback); + } + /** + * Get a reference to a Cloud Pub/Sub Notification. + * + * @param {string} id ID of notification. + * @returns {Notification} + * @see Notification + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const notification = bucket.notification('1'); + * ``` + */ + notification(id) { + if (!id) { + throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); + } + return new notification_js_1.Notification(this, id); + } + /** + * Remove an already-existing retention policy from this bucket, if it is not + * locked. + * + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * bucket.removeRetentionPeriod(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.removeRetentionPeriod().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + removeRetentionPeriod(optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: null, + }, options, callback); + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { + reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; + } + return super.request(reqOpts, callback); + } + /** + * @deprecated + * @typedef {array} SetLabelsResponse + * @property {object} 0 The bucket metadata. + */ + /** + * @deprecated + * @callback SetLabelsCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The bucket metadata. + */ + /** + * @deprecated + * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @deprecated Use setMetadata directly. + * Set labels on the bucket. + * + * This makes an underlying call to {@link Bucket#setMetadata}, which + * is a PATCH request. This means an individual label can be overwritten, but + * unmentioned labels will not be touched. + * + * @param {object} labels Labels to set on the bucket. + * @param {SetLabelsOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetLabelsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * const labels = { + * labelone: 'labelonevalue', + * labeltwo: 'labeltwovalue' + * }; + * + * bucket.setLabels(labels, function(err, metadata) { + * if (!err) { + * // Labels set successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setLabels(labels).then(function(data) { + * const metadata = data[0]; + * }); + * ``` + */ + setLabels(labels, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.setMetadata({ labels }, options, callback); + } + setMetadata(metadata, optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * Lock all objects contained in the bucket, based on their creation time. Any + * attempt to overwrite or delete objects younger than the retention period + * will result in a `PERMISSION_DENIED` error. + * + * An unlocked retention policy can be modified or removed from the bucket via + * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A + * locked retention policy cannot be removed or shortened in duration for the + * lifetime of the bucket. Attempting to remove or decrease period of a locked + * retention policy will result in a `PERMISSION_DENIED` error. You can still + * increase the policy. + * + * @param {*} duration In seconds, the minimum retention time for all objects + * contained in this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. + * @param {SetBucketMetadataCallback} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const DURATION_SECONDS = 15780000; // 6 months. + * + * //- + * // Lock the objects in this bucket for 6 months. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setRetentionPeriod(duration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + retentionPolicy: { + retentionPeriod: duration.toString(), + }, + }, options, callback); + } + /** + * + * @typedef {object} Cors + * @property {number} [maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @property {string[]} [method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @property {string[]} [responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + */ + /** + * This can be used to set the CORS configuration on the bucket. + * + * The configuration will be overwritten with the value passed into this. + * + * @param {Cors[]} corsConfiguration The new CORS configuration to set + * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is + * allowed to make requests before it must repeat the preflight request. + * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource + * sharing with this bucket. + * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin + * resource sharing with this bucket. + * @param {SetBucketMetadataCallback} [callback] Callback function. 
+ * @param {SetBucketMetadataOptions} [options] Options, including precondition options. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const bucket = storage.bucket('albums'); + * + * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour + * bucket.setCorsConfiguration(corsConfiguration); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + this.setMetadata({ + cors: corsConfiguration, + }, options, callback); + } + /** + * @typedef {object} SetBucketStorageClassOptions + * @property {string} [userProject] - The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetBucketStorageClassCallback + * @param {?Error} err Request error, if any. + */ + /** + * Set the default storage class for new files in this bucket. + * + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] - The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass = storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(); + this.setMetadata({ storageClass }, options, callback); + } + /** + * Set a user project to be billed for all requests made from this Bucket + * object and any files referenced from this Bucket object. + * + * @param {string} userProject The user project. 
+ * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * bucket.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.userProject = userProject; + const methods = [ + 'create', + 'delete', + 'exists', + 'get', + 'getMetadata', + 'setMetadata', + ]; + methods.forEach(method => { + const methodConfig = this.methods[method]; + if (typeof methodConfig === 'object') { + if (typeof methodConfig.reqOpts === 'object') { + Object.assign(methodConfig.reqOpts.qs, { userProject }); + } + else { + methodConfig.reqOpts = { + qs: { userProject }, + }; + } + } + }); + } + /** + * @typedef {object} UploadOptions Configuration options for Bucket#upload(). + * @property {string|File} [destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @property {string} [encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @property {boolean} [gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @property {string} [kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @property {object} [metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @property {string} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + */ + /** + * @typedef {array} UploadResponse + * @property {object} 0 The uploaded {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback UploadCallback + * @param {?Error} err Request error, if any. + * @param {object} file The uploaded {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Upload a file to the bucket. This is a convenience method that wraps + * {@link File#createWriteStream}. + * + * Resumable uploads are enabled by default + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} + * + * @param {string} pathString The fully qualified path to the file you + * wish to upload to your bucket. + * @param {UploadOptions} [options] Configuration options. + * @param {string|File} [options.destination] The place to save + * your file. If given a string, the file will be uploaded to the bucket + * using the string as a filename. When given a File object, your local + * file will be uploaded to the File object's bucket and under the File + * object's name. Lastly, when this argument is omitted, the file is uploaded + * to your bucket using the name of the local file. + * @param {string} [options.encryptionKey] A custom encryption key. See + * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. + * @param {boolean} [options.gzip] Automatically gzip the file. This will set + * `options.metadata.contentEncoding` to `gzip`. + * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will + * be used to encrypt the object. Must be in the format: + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. + * @param {object} [options.metadata] See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. + * @param {string} [options.offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @param {string} [options.predefinedAcl] Apply a predefined set of access + * controls to this object. + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @param {boolean} [options.private] Make the uploaded file private. 
(Alias for + * `options.predefinedAcl = 'private'`) + * @param {boolean} [options.public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @param {boolean} [options.resumable=true] Resumable uploads are automatically + * enabled and must be shut off explicitly by setting to false. + * @param {number} [options.timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @param {string} [options.uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {string|boolean} [options.validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with an + * MD5 checksum for maximum reliability. CRC32c will provide better + * performance with less reliability. You may also choose to skip + * validation completely, however this is **not recommended**. + * @param {UploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * + * //- + * // Upload a file from a local path. + * //- + * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { + * // Your bucket now contains: + * // - "image.png" (with the contents of `/local/path/image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * + * //- + * // It's not always that easy. You will likely want to specify the filename + * // used when your new file lands in your bucket. + * // + * // You may also want to set metadata or customize other options. + * //- + * const options = { + * destination: 'new-image.png', + * validation: 'crc32c', + * metadata: { + * metadata: { + * event: 'Fall trip to the zoo' + * } + * } + * }; + * + * bucket.upload('local-image.png', options, function(err, file) { + * // Your bucket now contains: + * // - "new-image.png" (with the contents of `local-image.png') + * + * // `file` is an instance of a File object that refers to your new file. + * }); + * + * //- + * // You can also have a file gzip'd on the fly. + * //- + * bucket.upload('index.html', { gzip: true }, function(err, file) { + * // Your bucket now contains: + * // - "index.html" (automatically compressed with gzip) + * + * // Downloading the file with `file.download` will automatically decode + * the + * // file. + * }); + * + * //- + * // You may also re-use a File object, {File}, that references + * // the file you wish to create or overwrite. + * //- + * const options = { + * destination: bucket.file('existing-file.png'), + * resumable: false + * }; + * + * bucket.upload('local-img.png', options, function(err, newFile) { + * // Your bucket now contains: + * // - "existing-file.png" (with the contents of `local-img.png') + * + * // Note: + * // The `newFile` parameter is equal to `file`. + * }); + * + * //- + * // To use + * // + * // Customer-supplied Encryption Keys, provide the `encryptionKey` + * option. + * //- + * const crypto = require('crypto'); + * const encryptionKey = crypto.randomBytes(32); + * + * bucket.upload('img.png', { + * encryptionKey: encryptionKey + * }, function(err, newFile) { + * // `img.png` was uploaded with your custom encryption key. 
+ * + * // `newFile` is already configured to use the encryption key when making + * // operations on the remote object. + * + * // However, to use your encryption key later, you must create a `File` + * // instance with the `key` supplied: + * const file = bucket.file('img.png', { + * encryptionKey: encryptionKey + * }); + * + * // Or with `file#setEncryptionKey`: + * const file = bucket.file('img.png'); + * file.setEncryptionKey(encryptionKey); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.upload('local-image.png').then(function(data) { + * const file = data[0]; + * }); + * + * To upload a file from a URL, use {@link File#createWriteStream}. + * + * ``` + * @example include:samples/files.js + * region_tag:storage_upload_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + */ + upload(pathString, optionsOrCallback, callback) { + var _a, _b; + const upload = (numberOfRetries) => { + const returnValue = (0, async_retry_1.default)(async (bail) => { + await new Promise((resolve, reject) => { + var _a, _b; + if (numberOfRetries === 0 && + ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { + newFile.storage.retryOptions.autoRetry = false; + } + const writable = newFile.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + fs.createReadStream(pathString) + .on('error', bail) + .pipe(writable) + .on('error', err => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + else { + return bail(err); + } + }) + .on('finish', () => { + return resolve(); + }); + }); + }, { + retries: numberOfRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(null, newFile, newFile.metadata); + } + }) + .catch(callback); + } + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (global['GCLOUD_SANDBOX_ENV']) { + return; + } + let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + options = Object.assign({ + metadata: {}, + }, options); + // Do not retry if precondition option ifGenerationMatch is not set + // because this is a file operation + let maxRetries = this.storage.retryOptions.maxRetries; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + maxRetries = 0; + } + let newFile; + if (options.destination instanceof file_js_1.File) { + newFile = options.destination; + } + else if (options.destination !== null && + typeof options.destination === 'string') { + // Use the string as the name of the file. + newFile = this.file(options.destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + else { + // Resort to using the name of the incoming file. + const destination = path.basename(pathString); + newFile = this.file(destination, { + encryptionKey: options.encryptionKey, + kmsKeyName: options.kmsKeyName, + preconditionOpts: this.instancePreconditionOpts, + }); + } + upload(maxRetries); + } + /** + * @private + * + * @typedef {object} MakeAllFilesPublicPrivateOptions + * @property {boolean} [force] Suppress errors until all files have been + * processed. + * @property {boolean} [private] Make files private. + * @property {boolean} [public] Make files public. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @private + * + * @callback SetBucketMetadataCallback + * @param {?Error} err Request error, if any. + * @param {File[]} files Files that were updated. + */ + /** + * @typedef {array} MakeAllFilesPublicPrivateResponse + * @property {File[]} 0 List of files affected. + */ + /** + * Iterate over all of a bucket's files, calling `file.makePublic()` (public) + * or `file.makePrivate()` (private) on each. + * + * Operations are performed in parallel, up to 10 at once. The first error + * breaks the loop, and will execute the provided callback with it. Specify + * `{ force: true }` to suppress the errors. + * + * @private + * + * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. + * @param {boolean} [options.force] Suppress errors until all files have been + * processed. + * @param {boolean} [options.private] Make files private. + * @param {boolean} [options.public] Make files public. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + + * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. + * + * @return {Promise} + */ + makeAllFilesPublicPrivate_(optionsOrCallback, callback) { + const MAX_PARALLEL_LIMIT = 10; + const errors = []; + const updatedFiles = []; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const processFile = async (file) => { + try { + await (options.public ? file.makePublic() : file.makePrivate(options)); + updatedFiles.push(file); + } + catch (e) { + if (!options.force) { + throw e; + } + errors.push(e); + } + }; + this.getFiles(options) + .then(([files]) => { + const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); + const promises = files.map(file => { + return limit(() => processFile(file)); + }); + return Promise.all(promises); + }) + .then(() => callback(errors.length > 0 ? 
errors : null, updatedFiles), err => callback(err, updatedFiles)); + } + getId() { + return this.id; + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + methodType, localPreconditionOptions) { + var _a, _b; + if (typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + (methodType === AvailableServiceObjectMethods.setMetadata || + methodType === AvailableServiceObjectMethods.delete) && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) { + this.storage.retryOptions.autoRetry = false; + } + else if (this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } +} +exports.Bucket = Bucket; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Bucket, 'getFiles'); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Bucket, { + exclude: ['cloudStorageURI', 'request', 'file', 'notification', 'restore'], +}); + + +/***/ }), + +/***/ 2658: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Channel = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * Create a channel object to interact with a Cloud Storage channel. + * + * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} + * + * @class + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ +class Channel extends index_js_1.ServiceObject { + constructor(storage, id, resourceId) { + const config = { + parent: storage, + baseUrl: '/channels', + // An ID shouldn't be included in the API requests. + // RE: + // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 + id: '', + methods: { + // Only need `request`. + }, + }; + super(config); + this.metadata.id = id; + this.metadata.resourceId = resourceId; + } + /** + * @typedef {array} StopResponse + * @property {object} 0 The full API response. 
+ */ + /** + * @callback StopCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Stop this channel. + * + * @param {StopCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * channel.stop(function(err, apiResponse) { + * if (!err) { + * // Channel stopped successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * channel.stop().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + stop(callback) { + callback = callback || index_js_1.util.noop; + this.request({ + method: 'POST', + uri: '/stop', + json: this.metadata, + }, (err, apiResponse) => { + callback(err, apiResponse); + }); + } +} +exports.Channel = Channel; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Channel); + + +/***/ }), + +/***/ 4317: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _CRC32C_crc32c; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; +const fs_1 = __nccwpck_require__(9896); +/** + * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} + */ +const CRC32C_EXTENSIONS = [ + 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, + 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, + 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, + 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, + 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, + 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, + 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, + 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, + 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, + 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, + 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, + 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, + 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, + 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, + 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, + 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, + 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, + 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, + 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, + 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, + 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, + 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, + 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, + 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, + 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, + 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, + 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, + 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, + 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, + 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, + 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, + 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, + 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, + 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, + 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, + 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, + 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, + 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, + 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, + 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, + 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 
0xc69f7b69, + 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, + 0xbe2da0a5, 0x4c4623a6, 0x5f16d052, 0xad7d5351, +]; +exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); +exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; +const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); +exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; +const CRC32C_EXCEPTION_MESSAGES = { + INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, + INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, + INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, +}; +exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; +class CRC32C { + /** + * Constructs a new `CRC32C` object. + * + * Reconstruction is recommended via the `CRC32C.from` static method. + * + * @param initialValue An initial CRC32C value - a signed 32-bit integer. + */ + constructor(initialValue = 0) { + /** Current CRC32C value */ + _CRC32C_crc32c.set(this, 0); + __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); + } + /** + * Calculates a CRC32C from a provided buffer. + * + * Implementation inspired from: + * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} + * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} + * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} + * + * @param data The `Buffer` to generate the CRC32C from + */ + update(data) { + let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; + for (const d of data) { + const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; + current = tablePoly ^ (current >>> 8); + } + __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); + } + /** + * Validates a provided input to the current CRC32C value. + * + * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer + */ + validate(input) { + if (typeof input === 'number') { + return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + else if (typeof input === 'string') { + return input === this.toString(); + } + else if (Buffer.isBuffer(input)) { + return Buffer.compare(input, this.toBuffer()) === 0; + } + else { + // `CRC32C`-like object + return input.toString() === this.toString(); + } + } + /** + * Returns a `Buffer` representation of the CRC32C value + */ + toBuffer() { + const buffer = Buffer.alloc(4); + buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); + return buffer; + } + /** + * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} + */ + toJSON() { + return this.toString(); + } + /** + * Returns a base64-encoded representation of the CRC32C value. 
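As a quick orientation to the `CRC32C` class being defined here, the sketch below exercises `update`, `toString`, `validate`, and `from`. The expected base64 value for the bytes of `'data'` (`'rth90Q=='`) is the one quoted in the JSDoc examples later in this bundle; `CRC32C` is assumed to be imported from the module that exports it.

```js
// Minimal usage sketch for the CRC32C class above (Node.js). Assumes CRC32C
// has been imported from the module that exports it (exports.CRC32C here).
const crc32c = new CRC32C();
crc32c.update(Buffer.from('data'));

console.log(crc32c.toString());           // 'rth90Q==' (base64 of the 4-byte CRC)
console.log(crc32c.validate('rth90Q==')); // true
console.log(crc32c.validate('DkjKuA==')); // false

// Round-trip through the serialized form.
const restored = CRC32C.from('rth90Q==');
console.log(restored.valueOf() === crc32c.valueOf()); // true
```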
+ * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} + */ + toString() { + return this.toBuffer().toString('base64'); + } + /** + * Returns the `number` representation of the CRC32C value as a signed 32-bit integer + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} + */ + valueOf() { + return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); + } + /** + * Generates a `CRC32C` from a compatible buffer format. + * + * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` + */ + static fromBuffer(value) { + let buffer; + if (Buffer.isBuffer(value)) { + buffer = value; + } + else if ('buffer' in value) { + // `ArrayBufferView` + buffer = Buffer.from(value.buffer); + } + else { + // `ArrayBuffer` + buffer = Buffer.from(value); + } + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); + } + return new CRC32C(buffer.readInt32BE()); + } + static async fromFile(file) { + const crc32c = new CRC32C(); + await new Promise((resolve, reject) => { + (0, fs_1.createReadStream)(file) + .on('data', (d) => { + if (typeof d === 'string') { + crc32c.update(Buffer.from(d)); + } + else { + crc32c.update(d); + } + }) + .on('end', () => resolve()) + .on('error', reject); + }); + return crc32c; + } + /** + * Generates a `CRC32C` from 4-byte base64-encoded data (string). + * + * @param value 4-byte base64-encoded data (string) + */ + static fromString(value) { + const buffer = Buffer.from(value, 'base64'); + if (buffer.byteLength !== 4) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); + } + return this.fromBuffer(buffer); + } + /** + * Generates a `CRC32C` from a safe, unsigned 32-bit integer. + * + * @param value an unsigned 32-bit integer + */ + static fromNumber(value) { + if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { + throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); + } + return new CRC32C(value); + } + /** + * Generates a `CRC32C` from a variety of compatable types. + * Note: strings are treated as input, not as file paths to read from. + * + * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) + */ + static from(value) { + if (typeof value === 'number') { + return this.fromNumber(value); + } + else if (typeof value === 'string') { + return this.fromString(value); + } + else if ('byteLength' in value) { + // `ArrayBuffer` | `Buffer` | `ArrayBufferView` + return this.fromBuffer(value); + } + else { + // `CRC32CValidator`/`CRC32C`-like + return this.fromString(value.toString()); + } + } +} +exports.CRC32C = CRC32C; +_CRC32C_crc32c = new WeakMap(); +CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; +CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; + + +/***/ }), + +/***/ 9975: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _File_instances, _File_validateIntegrity; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +const crypto = __importStar(__nccwpck_require__(6982)); +const fs = __importStar(__nccwpck_require__(9896)); +const mime_1 = __importDefault(__nccwpck_require__(4900)); +const resumableUpload = __importStar(__nccwpck_require__(8043)); +const stream_1 = __nccwpck_require__(2203); +const zlib = __importStar(__nccwpck_require__(3106)); +const storage_js_1 = __nccwpck_require__(2848); +const bucket_js_1 = __nccwpck_require__(2887); +const acl_js_1 = __nccwpck_require__(6637); +const signer_js_1 = __nccwpck_require__(7319); +const util_js_1 = __nccwpck_require__(7325); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +const util_js_2 = __nccwpck_require__(4557); +const crc32c_js_1 = __nccwpck_require__(4317); +const hash_stream_validator_js_1 = __nccwpck_require__(4873); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +var ActionToHTTPMethod; +(function (ActionToHTTPMethod) { + ActionToHTTPMethod["read"] = "GET"; + ActionToHTTPMethod["write"] = "PUT"; + ActionToHTTPMethod["delete"] = "DELETE"; + ActionToHTTPMethod["resumable"] = "POST"; +})(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); +/** + * @deprecated - no longer used + */ +exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; +/** + * @private + */ +const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; +/** + * @private + * This regex will match compressible content types. These are primarily text/*, +json, +text, +xml content types. + * This was based off of mime-db and may periodically need to be updated if new compressible content types become + * standards. 
+ */ +const COMPRESSIBLE_MIME_REGEX = new RegExp([ + /^text\/|application\/ecmascript|application\/javascript|application\/json/, + /|application\/postscript|application\/rtf|application\/toml|application\/vnd.dart/, + /|application\/vnd.ms-fontobject|application\/wasm|application\/x-httpd-php|application\/x-ns-proxy-autoconfig/, + /|application\/x-sh(?!ockwave-flash)|application\/x-tar|application\/x-virtualbox-hdd|application\/x-virtualbox-ova|application\/x-virtualbox-ovf/, + /|^application\/x-virtualbox-vbox$|application\/x-virtualbox-vdi|application\/x-virtualbox-vhd|application\/x-virtualbox-vmdk/, + /|application\/xml|application\/xml-dtd|font\/otf|font\/ttf|image\/bmp|image\/vnd.adobe.photoshop|image\/vnd.microsoft.icon/, + /|image\/vnd.ms-dds|image\/x-icon|image\/x-ms-bmp|message\/rfc822|model\/gltf-binary|\+json|\+text|\+xml|\+yaml/, +] + .map(r => r.source) + .join(''), 'i'); +class RequestError extends Error { +} +exports.RequestError = RequestError; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +const GS_UTIL_URL_REGEX = /(gs):\/\/([a-z0-9_.-]+)\/(.+)/g; +const HTTPS_PUBLIC_URL_REGEX = /(https):\/\/(storage\.googleapis\.com)\/([a-z0-9_.-]+)\/(.+)/g; +var FileExceptionMessages; +(function (FileExceptionMessages) { + FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; + FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; + FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; + FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; + FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; + FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; + FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; + FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; + FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; + FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; + FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; + FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; +})(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); +/** + * A File object is created from your {@link Bucket} object using + * {@link Bucket#file}. 
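`COMPRESSIBLE_MIME_REGEX` above feeds the `gzip: 'auto'` behavior documented later for `createWriteStream`: only content types it matches get gzipped. A toy illustration of the same idea; the pattern here is a deliberately simplified stand-in, not the full list above.

```js
// Simplified stand-in for COMPRESSIBLE_MIME_REGEX: text/*, a few application/*
// types, and any "+json"/"+text"/"+xml" suffix are treated as compressible.
const SIMPLE_COMPRESSIBLE = /^text\/|application\/(json|javascript|xml)|\+(json|text|xml)/i;

function shouldGzip(contentType) {
  return typeof contentType === 'string' && SIMPLE_COMPRESSIBLE.test(contentType);
}

console.log(shouldGzip('text/html'));                // true
console.log(shouldGzip('application/json'));         // true
console.log(shouldGzip('image/svg+xml'));            // true
console.log(shouldGzip('image/png'));                // false
console.log(shouldGzip('application/octet-stream')); // false
```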
+ * + * @class + */ +class File extends index_js_1.ServiceObject { + /** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * An ACL consists of one or more entries, where each entry grants permissions + * to an entity. Permissions define the actions that can be performed against + * an object or bucket (for example, `READ` or `WRITE`); the entity defines + * who the permission applies to (for example, a specific user or group of + * users). + * + * The `acl` object on a File instance provides methods to get you a list of + * the ACLs defined on your bucket, as well as set, update, and delete them. + * + * See {@link http://goo.gl/6qBBPO| About Access Control lists} + * + * @name File#acl + * @mixes Acl + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * //- + * // Make a file publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * file.acl.add(options, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + /** + * The API-formatted resource description of the file. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name File#metadata + * @type {object} + */ + /** + * The file's name. + * @name File#name + * @type {string} + */ + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} FileOptions Options passed to the File constructor. + * @property {string} [encryptionKey] A custom encryption key. 
+ * @property {number} [generation] Generation to scope the file to. + * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this + * object, if the object is encrypted by such a key. Limited availability; + * usable only by enabled projects. + * @property {string} [userProject] The ID of the project which will be + * billed for all requests made from File object. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs a file object. + * + * @param {Bucket} bucket The Bucket instance this file is + * attached to. + * @param {string} name The name of the remote file. + * @param {FileOptions} [options] Configuration options. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * ``` + */ + constructor(bucket, name, options = {}) { + var _a, _b; + const requestQueryObject = {}; + let generation; + if (options.generation !== null) { + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + requestQueryObject.generation = generation; + } + } + Object.assign(requestQueryObject, options.preconditionOpts); + const userProject = options.userProject || bucket.userProject; + if (typeof userProject === 'string') { + requestQueryObject.userProject = userProject; + } + const methods = { + /** + * @typedef {array} DeleteFileResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteFileCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Delete the file. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} + * + * @method File#delete + * @param {object} [options] Configuration options. + * @param {boolean} [options.ignoreNotFound = false] Ignore an error if + * the file does not exist. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_delete_file + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} FileExistsResponse + * @property {boolean} 0 Whether the {@link File} exists. + */ + /** + * @callback FileExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the {@link File} exists. + */ + /** + * Check if the file exists. + * + * @method File#exists + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {FileExistsCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileResponse + * @property {File} 0 The {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileCallback + * @param {?Error} err Request error, if any. + * @param {File} file The {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * Get a file object and its metadata if it exists. + * + * @method File#get + * @param {options} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {number} [options.generation] The generation number to get + * @param {string} [options.restoreToken] If this is a soft-deleted object in an HNS-enabled bucket, returns the restore token which will + * be necessary to restore it if there's a name conflict with another object. + * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. + Object `generation` is required if `softDeleted` is set to True. + * @param {GetFileCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.get(function(err, file, apiResponse) { + * // file.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.get().then(function(data) { + * const file = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} GetFileMetadataResponse + * @property {object} 0 The {@link File} metadata. + * @property {object} 1 The full API response. + */ + /** + * @callback GetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} metadata The {@link File} metadata. + * @param {object} apiResponse The full API response. + */ + /** + * Get the file's metadata. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} + * + * @method File#getMetadata + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_get_metadata + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). + * @param {string} [userProject] The ID of the project which will be billed for the request. + */ + /** + * @callback SetFileMetadataCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} SetFileMetadataResponse + * @property {object} 0 The full API response. + */ + /** + * Merge the given metadata with the current remote file's metadata. This + * will set metadata if it was previously unset or update previously set + * metadata. To unset previously set metadata, set its value to null. + * + * You can set custom key/value pairs in the metadata key of the given + * object, however the other properties outside of this object must adhere + * to the {@link https://goo.gl/BOnnCK| official API documentation}. + * + * + * See the examples below for more information. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @method File#setMetadata + * @param {object} [metadata] The metadata you wish to update. + * @param {SetFileMetadataOptions} [options] Configuration options. + * @param {SetFileMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * const metadata = { + * contentType: 'application/x-font-ttf', + * metadata: { + * my: 'custom', + * properties: 'go here' + * } + * }; + * + * file.setMetadata(metadata, function(err, apiResponse) {}); + * + * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } + * file.setMetadata({ + * metadata: { + * abc: '123', // will be set. + * unsetMe: null, // will be unset (deleted). + * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. + * } + * }, function(err, apiResponse) { + * // metadata should now be { abc: '123', hello: 'goodbye' } + * }); + * + * //- + * // Set a temporary hold on this file from its bucket's retention period + * // configuration. + * // + * file.setMetadata({ + * temporaryHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // Alternatively, you may set a temporary hold. This will follow the + * // same behavior as an event-based hold, with the exception that the + * // bucket's retention policy will not renew for this file from the time + * // the hold is released. + * //- + * file.setMetadata({ + * eventBasedHold: true + * }, function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setMetadata(metadata).then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + setMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + }; + super({ + parent: bucket, + baseUrl: '/o', + id: encodeURIComponent(name), + methods, + }); + _File_instances.add(this); + this.bucket = bucket; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.storage = bucket.parent; + // @TODO Can this duplicate code from above be avoided? 
+ if (options.generation !== null) { + let generation; + if (typeof options.generation === 'string') { + generation = Number(options.generation); + } + else { + generation = options.generation; + } + if (!isNaN(generation)) { + this.generation = generation; + } + } + this.kmsKeyName = options.kmsKeyName; + this.userProject = userProject; + this.name = name; + if (options.encryptionKey) { + this.setEncryptionKey(options.encryptionKey); + } + this.acl = new acl_js_1.Acl({ + request: this.request.bind(this), + pathPrefix: '/acl', + }); + this.crc32cGenerator = + options.crc32cGenerator || this.bucket.crc32cGenerator; + this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; + this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; + } + /** + * The object's Cloud Storage URI (`gs://`) + * + * @example + * ```ts + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('image.png'); + * + * // `gs://my-bucket/image.png` + * const href = file.cloudStorageURI.href; + * ``` + */ + get cloudStorageURI() { + const uri = this.bucket.cloudStorageURI; + uri.pathname = this.name; + return uri; + } + /** + * A helper method for determining if a request should be retried based on preconditions. + * This should only be used for methods where the idempotency is determined by + * `ifGenerationMatch` + * @private + * + * A request should not be retried under the following conditions: + * - if precondition option `ifGenerationMatch` is not set OR + * - if `idempotencyStrategy` is set to `RetryNever` + */ + shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { + var _a; + return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && + ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever); + } + /** + * @typedef {array} CopyResponse + * @property {File} 0 The copied {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback CopyCallback + * @param {?Error} err Request error, if any. + * @param {File} copiedFile The copied {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} CopyOptions Configuration options for File#copy(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [cacheControl] The cacheControl setting for the new file. + * @property {string} [contentEncoding] The contentEncoding setting for the new file. + * @property {string} [contentType] The contentType setting for the new file. + * @property {string} [destinationKmsKeyName] Resource name of the Cloud + * KMS key, of the form + * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, + * that will be used to encrypt the object. Overwrites the object + * metadata's `kms_key_name` value, if any. + * @property {Metadata} [metadata] Metadata to specify on the copied file. + * @property {string} [predefinedAcl] Set the ACL for the new file. 
+ * @property {string} [token] A previously-returned `rewriteToken` from an + * unfinished rewrite request. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Copy this file to another file. By default, this will copy the file to the + * same bucket, but you can choose to copy it to another Bucket by providing + * a Bucket or File object or a URL starting with "gs://". + * The generation of the file will not be preserved. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {CopyOptions} [options] Configuration options. See an + * @param {CopyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { + * // `my-bucket` now contains: + * // - "my-image.png" + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. + * //- + * const newLocation = 'gs://another-bucket/my-image-copy.png'; + * file.copy(newLocation, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-copy.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be copied to that bucket + * // using the same name. + * //- + * const anotherBucket = storage.bucket('another-bucket'); + * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `copiedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * file.copy(anotherFile, function(err, copiedFile, apiResponse) { + * // `my-bucket` still contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `copiedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.copy(newLocation).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_copy_file + * Another example: + */ + copy(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destBucket; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destBucket = this.storage.bucket(parsedDestination[1]); + destName = parsedDestination[2]; + } + else { + destBucket = this.bucket; + destName = destination; + } + } + else if (destination instanceof bucket_js_1.Bucket) { + destBucket = destination; + destName = this.name; + } + else if (destination instanceof File) { + destBucket = destination.bucket; + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + const query = {}; + if (this.generation !== undefined) { + query.sourceGeneration = this.generation; + } + if (options.token !== undefined) { + query.rewriteToken = options.token; + } + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (options.predefinedAcl !== undefined) { + query.destinationPredefinedAcl = options.predefinedAcl; + delete options.predefinedAcl; + } + newFile = newFile || destBucket.file(destName); + const headers = {}; + if (this.encryptionKey !== undefined) { + headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; + headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; + headers['x-goog-copy-source-encryption-key-sha256'] = + this.encryptionKeyHash; + } + if (newFile.encryptionKey !== undefined) { + this.setEncryptionKey(newFile.encryptionKey); + } + else if (options.destinationKmsKeyName !== undefined) { + query.destinationKmsKeyName = options.destinationKmsKeyName; + delete options.destinationKmsKeyName; + } + else if (newFile.kmsKeyName !== undefined) { + query.destinationKmsKeyName = newFile.kmsKeyName; + } + if (query.destinationKmsKeyName) { + this.kmsKeyName = query.destinationKmsKeyName; + const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); + if (keyIndex > -1) { + this.interceptors.splice(keyIndex, 1); + } + } + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + headers, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + if (resp.rewriteToken) { + const options = { + token: resp.rewriteToken, + }; + if (query.userProject) { + options.userProject = query.userProject; + } + if (query.destinationKmsKeyName) { + options.destinationKmsKeyName = query.destinationKmsKeyName; + } + this.copy(newFile, options, callback); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. + * @property {number} [start] A byte offset to begin the file's download + * from. Default is 0. NOTE: Byte ranges are inclusive; that is, + * `options.start = 0` and `options.end = 999` represent the first 1000 + * bytes in a file or object. NOTE: when specifying a byte range, data + * integrity is not available. + * @property {number} [end] A byte offset to stop reading the file at. + * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and + * `options.end = 999` represent the first 1000 bytes in a file or object. + * NOTE: when specifying a byte range, data integrity is not available. + * @property {boolean} [decompress=true] Disable auto decompression of the + * received data. By default this option is set to `true`. + * Applicable in cases where the data was uploaded with + * `gzip: true` option. See {@link File#createWriteStream}. + */ + /** + * Create a readable stream to read the contents of the remote file. It can be + * piped to a writable stream or listened to for 'data' events to read a + * file's contents. + * + * In the unlikely event there is a mismatch between what you downloaded and + * the version in your Bucket, your error handler will receive an error with + * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best + * recourse is to try downloading the file again. + * + * NOTE: Readable streams will emit the `end` event when the file is fully + * downloaded. + * + * @param {CreateReadStreamOptions} [options] Configuration options. + * @returns {ReadableStream} + * + * @example + * ``` + * //- + * //

<h4>Downloading a File</h4>
+ * // + * // The example below demonstrates how we can reference a remote file, then + * // pipe its contents to a local file. This is effectively creating a local + * // backup of your remote data. + * //- + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const fs = require('fs'); + * const remoteFile = bucket.file('image.png'); + * const localFilename = '/Users/stephen/Photos/image.png'; + * + * remoteFile.createReadStream() + * .on('error', function(err) {}) + * .on('response', function(response) { + * // Server connected and responded with the specified status and headers. + * }) + * .on('end', function() { + * // The file is fully downloaded. + * }) + * .pipe(fs.createWriteStream(localFilename)); + * + * //- + * // To limit the downloaded data to only a byte range, pass an options + * // object. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * start: 10000, + * end: 20000 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * + * //- + * // To read a tail byte range, specify only `options.end` as a negative + * // number. + * //- + * const logFile = myBucket.file('access_log'); + * logFile.createReadStream({ + * end: -100 + * }) + * .on('error', function(err) {}) + * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); + * ``` + */ + createReadStream(options = {}) { + options = Object.assign({ decompress: true }, options); + const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; + const tailRequest = options.end < 0; + let validateStream = undefined; + let request = undefined; + const throughStream = new util_js_2.PassThroughShim(); + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + const value = options.validation.toLowerCase().trim(); + crc32c = value === 'crc32c'; + md5 = value === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + } + const shouldRunValidation = !rangeRequest && (crc32c || md5); + if (rangeRequest) { + if (typeof options.validation === 'string' || + options.validation === true) { + throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); + } + // Range requests can't receive data integrity checks. + crc32c = false; + md5 = false; + } + const onComplete = (err) => { + if (err) { + // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. + // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. + if (request === null || request === void 0 ? void 0 : request.agent) { + request.agent.destroy(); + } + throughStream.destroy(err); + } + }; + // We listen to the response event from the request stream so that we + // can... + // + // 1) Intercept any data from going to the user if an error occurred. + // 2) Calculate the hashes from the http.IncomingMessage response + // stream, + // which will return the bytes from the source without decompressing + // gzip'd content. We then send it through decompressed, if + // applicable, to the user. + const onResponse = (err, _body, rawResponseStream) => { + if (err) { + // Get error message from the body. 
+ this.getBufferFromReadable(rawResponseStream).then(body => { + err.message = body.toString('utf8'); + throughStream.destroy(err); + }); + return; + } + request = rawResponseStream.request; + const headers = rawResponseStream.toJSON().headers; + const isCompressed = headers['content-encoding'] === 'gzip'; + const hashes = {}; + // The object is safe to validate if: + // 1. It was stored gzip and returned to us gzip OR + // 2. It was never stored as gzip + const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && + isCompressed) || + headers['x-goog-stored-content-encoding'] === 'identity'; + const transformStreams = []; + if (shouldRunValidation) { + // The x-goog-hash header should be set with a crc32c and md5 hash. + // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' + if (typeof headers['x-goog-hash'] === 'string') { + headers['x-goog-hash'] + .split(',') + .forEach((hashKeyValPair) => { + const delimiterIndex = hashKeyValPair.indexOf('='); + const hashType = hashKeyValPair.substring(0, delimiterIndex); + const hashValue = hashKeyValPair.substring(delimiterIndex + 1); + hashes[hashType] = hashValue; + }); + } + validateStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + md5, + crc32cGenerator: this.crc32cGenerator, + crc32cExpected: hashes.crc32c, + md5Expected: hashes.md5, + }); + } + if (md5 && !hashes.md5) { + const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); + hashError.code = 'MD5_NOT_AVAILABLE'; + throughStream.destroy(hashError); + return; + } + if (safeToValidate && shouldRunValidation && validateStream) { + transformStreams.push(validateStream); + } + if (isCompressed && options.decompress) { + transformStreams.push(zlib.createGunzip()); + } + (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); + }; + // Authenticate the request, then pipe the remote API request to the stream + // returned to the user. + const makeRequest = () => { + const query = { alt: 'media' }; + if (this.generation) { + query.generation = this.generation; + } + if (options.userProject) { + query.userProject = options.userProject; + } + const headers = { + 'Accept-Encoding': 'gzip', + 'Cache-Control': 'no-store', + }; + if (rangeRequest) { + const start = typeof options.start === 'number' ? options.start : '0'; + const end = typeof options.end === 'number' ? options.end : ''; + headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; + } + const reqOpts = { + uri: '', + headers, + qs: query, + }; + if (options[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; + } + this.requestStream(reqOpts) + .on('error', err => { + throughStream.destroy(err); + }) + .on('response', res => { + throughStream.emit('response', res); + index_js_1.util.handleResp(null, res, null, onResponse); + }) + .resume(); + }; + throughStream.on('reading', makeRequest); + return throughStream; + } + /** + * @callback CreateResumableUploadCallback + * @param {?Error} err Request error, if any. + * @param {string} uri The resumable upload's unique session URI. + */ + /** + * @typedef {array} CreateResumableUploadResponse + * @property {string} 0 The resumable upload's unique session URI. + */ + /** + * @typedef {object} CreateResumableUploadOptions + * @property {object} [metadata] Metadata to set on the file. + * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. 
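The response handler above extracts expected checksums from the `x-goog-hash` header, which arrives as a comma-separated list such as `crc32c=xxxx,md5=xxxx`; splitting each pair on the first `=` only matters because the base64 values themselves end in `=` padding. A standalone sketch of that parsing step (the header value below is illustrative):

```js
// Parse an x-goog-hash header value ('crc32c=...,md5=...') into an object,
// splitting each pair on the first '=' so base64 '=' padding is preserved.
function parseGoogHash(headerValue) {
  const hashes = {};
  if (typeof headerValue !== 'string') return hashes;
  for (const pair of headerValue.split(',')) {
    const i = pair.indexOf('=');
    if (i === -1) continue;
    hashes[pair.substring(0, i)] = pair.substring(i + 1);
  }
  return hashes;
}

// Example (values are illustrative):
console.log(parseGoogHash('crc32c=rth90Q==,md5=1B2M2Y8AsgTpgAmY7PhCfg=='));
// { crc32c: 'rth90Q==', md5: '1B2M2Y8AsgTpgAmY7PhCfg==' }
```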
+ * @property {string} [origin] Origin header to set for the upload. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string} [chunkSize] Create a separate request per chunk. This + * value is in bytes and should be a multiple of 256 KiB (2^18). + * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} + */ + /** + * Create a unique resumable upload session URI. This is the first step when + * performing a resumable upload. + * + * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * for more on how the entire process works. + * + *

<h4>Note</h4>
+ * + * If you are just looking to perform a resumable upload without worrying + * about any of the details, see {@link File#createWriteStream}. Resumable + * uploads are performed by default. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} + * + * @param {CreateResumableUploadOptions} [options] Configuration options. + * @param {CreateResumableUploadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * file.createResumableUpload(function(err, uri) { + * if (!err) { + * // `uri` can be used to PUT data to. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.createResumableUpload().then(function(data) { + * const uri = data[0]; + * }); + * ``` + */ + createResumableUpload(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const retryOptions = this.storage.retryOptions; + if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && + ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + retryOptions.autoRetry = false; + } + resumableUpload.createURI({ + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + origin: options.origin, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + userProject: options.userProject || this.userProject, + retryOptions: retryOptions, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }, callback); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). + * @property {string} [contentType] Alias for + * `options.metadata.contentType`. If set to `auto`, the file name is used + * to determine the contentType. + * @property {string|boolean} [gzip] If true, automatically gzip the file. + * If set to `auto`, the contentType is used to determine if the file + * should be gzipped. This will set `options.metadata.contentEncoding` to + * `gzip` if necessary. + * @property {object} [metadata] See the examples below or + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} + * for more details. 
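Tying the `contentType: 'auto'` and `gzip: 'auto'` options described above together: with both on `'auto'`, the content type is inferred from the destination file name and the upload is gzipped only when that type looks compressible (cf. `COMPRESSIBLE_MIME_REGEX` earlier). A small sketch, assuming the public `@google-cloud/storage` API; bucket name and paths are placeholders:

```js
// Sketch: streaming a local file into a bucket with contentType/gzip left on
// 'auto'. Bucket name and paths are placeholders.
const fs = require('fs');
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const file = storage.bucket('my-bucket').file('report.csv');

const writeStream = file.createWriteStream({
  contentType: 'auto', // inferred from the file name ('text/csv')
  gzip: 'auto',        // gzip only if the inferred type is compressible
});

fs.createReadStream('/local/path/report.csv')
  .on('error', err => console.error('read failed', err))
  .pipe(writeStream)
  .on('error', err => console.error('upload failed', err))
  .on('finish', () => console.log('upload complete'));
```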
+ * @property {number} [offset] The starting byte of the upload stream, for + * resuming an interrupted upload. Defaults to 0. + * @property {string} [predefinedAcl] Apply a predefined set of access + * controls to this object. + * + * Acceptable values are: + * - **`authenticatedRead`** - Object owner gets `OWNER` access, and + * `allAuthenticatedUsers` get `READER` access. + * + * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and + * project team owners get `OWNER` access. + * + * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project + * team owners get `READER` access. + * + * - **`private`** - Object owner gets `OWNER` access. + * + * - **`projectPrivate`** - Object owner gets `OWNER` access, and project + * team members get access according to their roles. + * + * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` + * get `READER` access. + * @property {boolean} [private] Make the uploaded file private. (Alias for + * `options.predefinedAcl = 'private'`) + * @property {boolean} [public] Make the uploaded file public. (Alias for + * `options.predefinedAcl = 'publicRead'`) + * @property {boolean} [resumable] Force a resumable upload. NOTE: When + * working with streams, the file format and size is unknown until it's + * completely consumed. Because of this, it's best for you to be explicit + * for what makes sense given your input. + * @property {number} [timeout=60000] Set the HTTP request timeout in + * milliseconds. This option is not available for resumable uploads. + * Default: `60000` + * @property {string} [uri] The URI for an already-created resumable + * upload. See {@link File#createResumableUpload}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {string|boolean} [validation] Possible values: `"md5"`, + * `"crc32c"`, or `false`. By default, data integrity is validated with a + * CRC32c checksum. You may use MD5 if preferred, but that hash is not + * supported for composite objects. An error will be raised if MD5 is + * specified but is not available. You may also choose to skip validation + * completely, however this is **not recommended**. In addition to specifying + * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will + * cause the server to perform validation in addition to client validation. + * NOTE: Validation is automatically skipped for objects that were + * uploaded using the `gzip` option and have already compressed content. + */ + /** + * Create a writable stream to overwrite the contents of the file in your + * bucket. + * + * A File object can also be used to create files for the first time. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * + *

<p class="notice"> + * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + * </p>
+ * + * NOTE: Writable streams will emit the `finish` event when the file is fully + * uploaded. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} + * + * @param {CreateWriteStreamOptions} [options] Configuration options. + * @returns {WritableStream} + * + * @example + * ``` + * const fs = require('fs'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * //

<h4>Uploading a File</h4>
+ * // + * // Now, consider a case where we want to upload a file to your bucket. You + * // have the option of using {@link Bucket#upload}, but that is just + * // a convenience method which will do the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream()) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * //

<h4>Uploading a File with gzip compression</h4>
+ * //- + * fs.createReadStream('/Users/stephen/site/index.html') + * .pipe(file.createWriteStream({ gzip: true })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * + * //- + * // Downloading the file with `createReadStream` will automatically decode + * // the file. + * //- + * + * //- + * //

<h4>Uploading a File with Metadata</h4>
+ * // + * // One last case you may run into is when you want to upload a file to your + * // bucket and set its metadata at the same time. Like above, you can use + * // {@link Bucket#upload} to do this, which is just a wrapper around + * // the following. + * //- + * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') + * .pipe(file.createWriteStream({ + * metadata: { + * contentType: 'image/jpeg', + * metadata: { + * custom: 'metadata' + * } + * } + * })) + * .on('error', function(err) {}) + * .on('finish', function() { + * // The file upload is complete. + * }); + * ``` + * + * //- + * //

<h4>Continuing a Resumable Upload</h4>
+ * // + * // One can capture a `uri` from a resumable upload to reuse later. + * // Additionally, for validation, one can also capture and pass `crc32c`. + * //- + * let uri: string | undefined = undefined; + * let resumeCRC32C: string | undefined = undefined; + * + * fs.createWriteStream() + * .on('uri', link => {uri = link}) + * .on('crc32', crc32c => {resumeCRC32C = crc32c}); + * + * // later... + * fs.createWriteStream({uri, resumeCRC32C}); + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + createWriteStream(options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); + if (options.contentType) { + options.metadata.contentType = options.contentType; + } + if (!options.metadata.contentType || + options.metadata.contentType === 'auto') { + const detectedContentType = mime_1.default.getType(this.name); + if (detectedContentType) { + options.metadata.contentType = detectedContentType; + } + } + let gzip = options.gzip; + if (gzip === 'auto') { + gzip = COMPRESSIBLE_MIME_REGEX.test(options.metadata.contentType || ''); + } + if (gzip) { + options.metadata.contentEncoding = 'gzip'; + } + let crc32c = true; + let md5 = false; + if (typeof options.validation === 'string') { + options.validation = options.validation.toLowerCase(); + crc32c = options.validation === 'crc32c'; + md5 = options.validation === 'md5'; + } + else if (options.validation === false) { + crc32c = false; + md5 = false; + } + if (options.offset) { + if (md5) { + throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); + } + if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { + throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); + } + } + /** + * A callback for determining when the underlying pipeline is complete. + * It's possible the pipeline callback could error before the write stream + * calls `final` so by default this will destroy the write stream unless the + * write stream sets this callback via its `final` handler. + * @param error An optional error + */ + let pipelineCallback = error => { + writeStream.destroy(error || undefined); + }; + // A stream for consumer to write to + const writeStream = new stream_1.Writable({ + final(cb) { + // Set the pipeline callback to this callback so the pipeline's results + // can be populated to the consumer + pipelineCallback = cb; + emitStream.end(); + }, + write(chunk, encoding, cb) { + emitStream.write(chunk, encoding, cb); + }, + }); + // If the write stream, which is returned to the caller, catches an error we need to make sure that + // at least one of the streams in the pipeline below gets notified so that they + // all get cleaned up / destroyed. + writeStream.once('error', e => { + emitStream.destroy(e); + }); + // If the write stream is closed, cleanup the pipeline below by calling destroy on one of the streams. + writeStream.once('close', () => { + emitStream.destroy(); + }); + const transformStreams = []; + if (gzip) { + transformStreams.push(zlib.createGzip()); + } + const emitStream = new util_js_2.PassThroughShim(); + let hashCalculatingStream = null; + if (crc32c || md5) { + const crc32cInstance = options.resumeCRC32C + ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) + : undefined; + hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ + crc32c, + crc32cInstance, + md5, + crc32cGenerator: this.crc32cGenerator, + updateHashesOnly: true, + }); + transformStreams.push(hashCalculatingStream); + } + const fileWriteStream = (0, duplexify_1.default)(); + let fileWriteStreamMetadataReceived = false; + // Handing off emitted events to users + emitStream.on('reading', () => writeStream.emit('reading')); + emitStream.on('writing', () => writeStream.emit('writing')); + fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); + fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); + fileWriteStream.on('response', resp => writeStream.emit('response', resp)); + fileWriteStream.once('metadata', () => { + fileWriteStreamMetadataReceived = true; + }); + writeStream.once('writing', () => { + if (options.resumable === false) { + this.startSimpleUpload_(fileWriteStream, options); + } + else { + this.startResumableUpload_(fileWriteStream, options); + } + (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { + if (e) { + return pipelineCallback(e); + } + // We want to make sure we've received the metadata from the server in order + // to properly validate the object's integrity. Depending on the type of upload, + // the stream could close before the response is returned. + if (!fileWriteStreamMetadataReceived) { + try { + await new Promise((resolve, reject) => { + fileWriteStream.once('metadata', resolve); + fileWriteStream.once('error', reject); + }); + } + catch (e) { + return pipelineCallback(e); + } + } + // Emit the local CRC32C value for future validation, if validation is enabled. + if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { + writeStream.emit('crc32c', hashCalculatingStream.crc32c); + } + try { + // Metadata may not be ready if the upload is a partial upload, + // nothing to validate yet. + const metadataNotReady = options.isPartialUpload && !this.metadata; + if (hashCalculatingStream && !metadataNotReady) { + await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { + crc32c, + md5, + }); + } + pipelineCallback(); + } + catch (e) { + pipelineCallback(e); + } + }); + }); + return writeStream; + } + delete(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); + super + .delete(options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} DownloadResponse + * @property [0] The contents of a File. + */ + /** + * @callback DownloadCallback + * @param err Request error, if any. + * @param contents The contents of a File. + */ + /** + * Convenience method to download a file into memory or to a local + * destination. + * + * @param {object} [options] Configuration options. The arguments match those + * passed to {@link File#createReadStream}. + * @param {string} [options.destination] Local file path to write the file's + * contents to. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. 
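Beyond the callback examples in the JSDoc above, `createWriteStream()` composes with Node's `stream/promises` pipeline; a minimal sketch, with a hypothetical local path, that also disables the resumable session for a small upload:

```
const fs = require('fs');
const {pipeline} = require('stream/promises');
const {Storage} = require('@google-cloud/storage');

async function uploadSmallFile() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  // For files under ~10 MB, skipping the resumable session avoids its
  // extra round trips (see the notice in the createWriteStream docs above).
  await pipeline(
    fs.createReadStream('/path/to/small-file.txt'),
    file.createWriteStream({resumable: false})
  );
}
```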
+ * @param {DownloadCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Download a file into memory. The contents will be available as the + * second + * // argument in the demonstration below, `contents`. + * //- + * file.download(function(err, contents) {}); + * + * //- + * // Download a file to a local destination. + * //- + * file.download({ + * destination: '/Users/me/Desktop/file-backup.txt' + * }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.download().then(function(data) { + * const contents = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_download_file + * Another example: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + * + * @example include:samples/requesterPays.js + * region_tag:storage_download_file_requester_pays + * Example of downloading a file where the requester pays: + */ + download(optionsOrCallback, cb) { + let options; + if (typeof optionsOrCallback === 'function') { + cb = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback; + } + let called = false; + const callback = ((...args) => { + if (!called) + cb(...args); + called = true; + }); + const destination = options.destination; + delete options.destination; + const fileStream = this.createReadStream(options); + let receivedData = false; + if (destination) { + fileStream + .on('error', callback) + .once('data', data => { + receivedData = true; + // We know that the file exists the server - now we can truncate/write to a file + const writable = fs.createWriteStream(destination); + writable.write(data); + fileStream + .pipe(writable) + .on('error', (err) => { + callback(err, Buffer.from('')); + }) + .on('finish', () => { + callback(null, data); + }); + }) + .on('end', () => { + // In the case of an empty file no data will be received before the end event fires + if (!receivedData) { + const data = Buffer.alloc(0); + try { + fs.writeFileSync(destination, data); + callback(null, data); + } + catch (e) { + callback(e, data); + } + } + }); + } + else { + this.getBufferFromReadable(fileStream) + .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) + .catch(callback); + } + } + /** + * The Storage API allows you to use a custom key for server-side encryption. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {string|buffer} encryptionKey An AES-256 encryption key. 
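The `download()` implementation above can also be driven with promises; a minimal async sketch (bucket, object, and destination names are placeholders):

```
const {Storage} = require('@google-cloud/storage');

async function readObject() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  // Download into memory; `contents` is a Buffer.
  const [contents] = await file.download();

  // Or write straight to a local path instead of buffering in memory.
  await file.download({destination: '/tmp/my-file-copy'});

  return contents.toString();
}
```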
+ * @returns {File} + * + * @example + * ``` + * const crypto = require('crypto'); + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const encryptionKey = crypto.randomBytes(32); + * + * const fileWithCustomEncryption = myBucket.file('my-file'); + * fileWithCustomEncryption.setEncryptionKey(encryptionKey); + * + * const fileWithoutCustomEncryption = myBucket.file('my-file'); + * + * fileWithCustomEncryption.save('data', function(err) { + * // Try to download with the File object that hasn't had + * // `setEncryptionKey()` called: + * fileWithoutCustomEncryption.download(function(err) { + * // We will receive an error: + * // err.message === 'Bad Request' + * + * // Try again with the File object we called `setEncryptionKey()` on: + * fileWithCustomEncryption.download(function(err, contents) { + * // contents.toString() === 'data' + * }); + * }); + * }); + * + * ``` + * @example include:samples/encryption.js + * region_tag:storage_upload_encrypted_file + * Example of uploading an encrypted file: + * + * @example include:samples/encryption.js + * region_tag:storage_download_encrypted_file + * Example of downloading an encrypted file: + */ + setEncryptionKey(encryptionKey) { + this.encryptionKey = encryptionKey; + this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); + this.encryptionKeyHash = crypto + .createHash('sha256') + // eslint-disable-next-line @typescript-eslint/no-explicit-any + .update(this.encryptionKeyBase64, 'base64') + .digest('base64'); + this.encryptionKeyInterceptor = { + request: reqOpts => { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryptionKeyHash; + return reqOpts; + }, + }; + this.interceptors.push(this.encryptionKeyInterceptor); + return this; + } + /** + * Gets a reference to a Cloud Storage {@link File} file from the provided URL in string format. + * @param {string} publicUrlOrGsUrl the URL as a string. Must be of the format gs://bucket/file + * or https://storage.googleapis.com/bucket/file. + * @param {Storage} storageInstance an instance of a Storage object. + * @param {FileOptions} [options] Configuration options + * @returns {File} + */ + static from(publicUrlOrGsUrl, storageInstance, options) { + const gsMatches = [...publicUrlOrGsUrl.matchAll(GS_UTIL_URL_REGEX)]; + const httpsMatches = [...publicUrlOrGsUrl.matchAll(HTTPS_PUBLIC_URL_REGEX)]; + if (gsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, gsMatches[0][2]); + return new File(bucket, gsMatches[0][3], options); + } + else if (httpsMatches.length > 0) { + const bucket = new bucket_js_1.Bucket(storageInstance, httpsMatches[0][3]); + return new File(bucket, httpsMatches[0][4], options); + } + else { + throw new Error('URL string must be of format gs://bucket/file or https://storage.googleapis.com/bucket/file'); + } + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? 
optionsOrCallback + : cb; + super + .get(options) + .then(resp => cb(null, ...resp)) + .catch(cb); + } + /** + * @typedef {array} GetExpirationDateResponse + * @property {date} 0 A Date object representing the earliest time this file's + * retention policy will expire. + */ + /** + * @callback GetExpirationDateCallback + * @param {?Error} err Request error, if any. + * @param {date} expirationDate A Date object representing the earliest time + * this file's retention policy will expire. + */ + /** + * If this bucket has a retention policy defined, use this method to get a + * Date object representing the earliest time this file will expire. + * + * @param {GetExpirationDateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const storage = require('@google-cloud/storage')(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.getExpirationDate(function(err, expirationDate) { + * // expirationDate is a Date object. + * }); + * ``` + */ + getExpirationDate(callback) { + this.getMetadata((err, metadata, apiResponse) => { + if (err) { + callback(err, null, apiResponse); + return; + } + if (!metadata.retentionExpirationTime) { + const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); + callback(error, null, apiResponse); + return; + } + callback(null, new Date(metadata.retentionExpirationTime), apiResponse); + }); + } + /** + * @typedef {array} GenerateSignedPostPolicyV2Response + * @property {object} 0 The document policy. + */ + /** + * @callback GenerateSignedPostPolicyV2Callback + * @param {?Error} err Request error, if any. + * @param {object} policy The document policy. + */ + /** + * Get a signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} + * + * @throws {Error} If an expiration timestamp from the past is given. + * @throws {Error} If options.equals has an array with less or more than two + * members. + * @throws {Error} If options.startsWith has an array with less or more than two + * members. + * + * @param {object} options Configuration options. + * @param {array|array[]} [options.equals] Array of request parameters and + * their expected value (e.g. [['$', '']]). Values are + * translated into equality constraints in the conditions field of the + * policy document (e.g. ['eq', '$', '']). If only one + * equality condition is to be specified, options.equals can be a one- + * dimensional array (e.g. ['$', '']). + * @param {*} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {array|array[]} [options.startsWith] Array of request parameters and + * their expected prefixes (e.g. [['$', '']). Values are + * translated into starts-with constraints in the conditions field of the + * policy document (e.g. ['starts-with', '$', '']). 
If only + * one prefix condition is to be specified, options.startsWith can be a + * one- dimensional array (e.g. ['$', '']). + * @param {string} [options.acl] ACL for the object from possibly predefined + * ACLs. + * @param {string} [options.successRedirect] The URL to which the user client + * is redirected if the upload is successful. + * @param {string} [options.successStatus] - The status of the Google Storage + * response if the upload is successful (must be string). + * @param {object} [options.contentLengthRange] + * @param {number} [options.contentLengthRange.min] Minimum value for the + * request's content length. + * @param {number} [options.contentLengthRange.max] Maximum value for the + * request's content length. + * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * equals: ['$Content-Type', 'image/jpeg'], + * expires: '10-25-2022', + * contentLengthRange: { + * min: 0, + * max: 1024 + * } + * }; + * + * file.generateSignedPostPolicyV2(options, function(err, policy) { + * // policy.string: the policy document in plain text. + * // policy.base64: the policy document in base64. + * // policy.signature: the policy signature in base64. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV2(options).then(function(data) { + * const policy = data[0]; + * }); + * ``` + */ + generateSignedPostPolicyV2(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + options = Object.assign({}, options); + const conditions = [ + ['eq', '$key', this.name], + { + bucket: this.bucket.name, + }, + ]; + if (Array.isArray(options.equals)) { + if (!Array.isArray(options.equals[0])) { + options.equals = [options.equals]; + } + options.equals.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); + } + conditions.push(['eq', condition[0], condition[1]]); + }); + } + if (Array.isArray(options.startsWith)) { + if (!Array.isArray(options.startsWith[0])) { + options.startsWith = [options.startsWith]; + } + options.startsWith.forEach(condition => { + if (!Array.isArray(condition) || condition.length !== 2) { + throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); + } + conditions.push(['starts-with', condition[0], condition[1]]); + }); + } + if (options.acl) { + conditions.push({ + acl: options.acl, + }); + } + if (options.successRedirect) { + conditions.push({ + success_action_redirect: options.successRedirect, + }); + } + if (options.successStatus) { + conditions.push({ + success_action_status: options.successStatus, + }); + } + if (options.contentLengthRange) { + const min = options.contentLengthRange.min; + const max = options.contentLengthRange.max; + if (typeof min !== 'number' || typeof max !== 'number') { + throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); + } 
+ conditions.push(['content-length-range', min, max]); + } + const policy = { + expiration: expires.toISOString(), + conditions, + }; + const policyString = JSON.stringify(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { + callback(null, { + string: policyString, + base64: policyBase64, + signature, + }); + }, err => { + callback(new signer_js_1.SigningError(err.message)); + }); + } + /** + * @typedef {object} SignedPostPolicyV4Output + * @property {string} url The request URL. + * @property {object} fields The form fields to include in the POST request. + */ + /** + * @typedef {array} GenerateSignedPostPolicyV4Response + * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. + */ + /** + * @callback GenerateSignedPostPolicyV4Callback + * @param {?Error} err Request error, if any. + * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. + */ + /** + * Get a v4 signed policy document to allow a user to upload data with a POST + * request. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed policy. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} + * + * @param {object} options Configuration options. + * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any + * value given is passed to `new Date()`. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style + * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instead of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in + * the result, e.g. "https://cdn.example.com". + * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} + * to include in the signed policy. Any fields with key beginning with 'x-ignore-' + * will not be included in the policy to be signed. + * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} + * to include in the signed policy. All fields given in `config.fields` are + * automatically included in the conditions array, adding the same entry + * in both `fields` and `conditions` will result in duplicate entries. + * + * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. 
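The V4 policy documented here returns a `url` plus `fields` that a client posts as multipart form data; a minimal sketch assuming Node 18+ globals (`fetch`, `FormData`, `Blob`) and illustrative content:

```
const {Storage} = require('@google-cloud/storage');

async function uploadViaSignedPost() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  const [policy] = await file.generateSignedPostPolicyV4({
    expires: Date.now() + 10 * 60 * 1000, // 10 minutes from now
  });

  // The policy fields must accompany the upload; the file part goes last.
  const form = new FormData();
  for (const [name, value] of Object.entries(policy.fields)) {
    form.append(name, value);
  }
  form.append('file', new Blob(['example contents']));

  const response = await fetch(policy.url, {method: 'POST', body: form});
  // A 204 response indicates the object was created.
  return response.status;
}
```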
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const options = { + * expires: '10-25-2022', + * conditions: [ + * ['eq', '$Content-Type', 'image/jpeg'], + * ['content-length-range', 0, 1024], + * ], + * fields: { + * acl: 'public-read', + * 'x-goog-meta-foo': 'bar', + * 'x-ignore-mykey': 'data' + * } + * }; + * + * file.generateSignedPostPolicyV4(options, function(err, response) { + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.generateSignedPostPolicyV4(options).then(function(data) { + * const response = data[0]; + * // response.url The request URL + * // response.fields The form fields (including the signature) to include + * // to be used to upload objects by HTML forms. + * }); + * ``` + */ + generateSignedPostPolicyV4(optionsOrCallback, cb) { + const args = (0, util_js_2.normalize)(optionsOrCallback, cb); + let options = args.options; + const callback = args.callback; + const expires = new Date(options.expires); + if (isNaN(expires.getTime())) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expires.valueOf() < Date.now()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + options = Object.assign({}, options); + let fields = Object.assign({}, options.fields); + const now = new Date(); + const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); + const todayISO = (0, util_js_2.formatAsUTCISO)(now); + const sign = async () => { + const { client_email } = await this.storage.authClient.getCredentials(); + const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; + fields = { + ...fields, + bucket: this.bucket.name, + key: this.name, + 'x-goog-date': nowISO, + 'x-goog-credential': credential, + 'x-goog-algorithm': 'GOOG4-RSA-SHA256', + }; + const conditions = options.conditions || []; + Object.entries(fields).forEach(([key, value]) => { + if (!key.startsWith('x-ignore-')) { + conditions.push({ [key]: value }); + } + }); + delete fields.bucket; + const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); + const policy = { + conditions, + expiration, + }; + const policyString = (0, util_js_2.unicodeJSONStringify)(policy); + const policyBase64 = Buffer.from(policyString).toString('base64'); + try { + const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const universe = this.parent.storage.universeDomain; + fields['policy'] = policyBase64; + fields['x-goog-signature'] = signatureHex; + let url; + if (this.storage.customEndpoint) { + url = this.storage.apiEndpoint; + } + else if (options.virtualHostedStyle) { + url = `https://${this.bucket.name}.storage.${universe}/`; + } + else if (options.bucketBoundHostname) { + url = `${options.bucketBoundHostname}/`; + } + else { + url = `https://storage.${universe}/${this.bucket.name}/`; + } + return { + url, + fields, + }; + } + catch (err) { + throw new signer_js_1.SigningError(err.message); + } + 
}; + sign().then(res => callback(null, res), callback); + } + /** + * @typedef {array} GetSignedUrlResponse + * @property {object} 0 The signed URL. + */ + /** + * @callback GetSignedUrlCallback + * @param {?Error} err Request error, if any. + * @param {object} url The signed URL. + */ + /** + * Get a signed URL to allow limited time access to the file. + * + * In Google Cloud Platform environments, such as Cloud Functions and App + * Engine, you usually don't provide a `keyFilename` or `credentials` during + * instantiation. In those environments, we call the + * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} + * to create a signed URL. That API requires either the + * `https://www.googleapis.com/auth/iam` or + * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are + * enabled. + * + * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} + * + * @throws {Error} if an expiration timestamp from the past is given. + * + * @param {object} config Configuration object. + * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or + * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). + * When using "resumable", the header `X-Goog-Resumable: start` has + * to be sent when making a request with the signed URL. + * @param {*} config.expires A timestamp when this link will expire. Any value + * given is passed to `new Date()`. + * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. + * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} + * @param {string} [config.version='v2'] The signing version to use, either + * 'v2' or 'v4'. + * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style + * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style + * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs + * should generally be preferred instaed of path-style URL. + * Currently defaults to `false` for path-style, although this may change in a + * future major-version release. + * @param {string} [config.cname] The cname for this bucket, i.e., + * "https://cdn.example.com". + * @param {string} [config.contentMd5] The MD5 digest value in base64. Just like + * if you provide this, the client must provide this HTTP header with this same + * value in its request, so to if this parameter is not provided here, + * the client must not provide any value for this HTTP header in its request. + * @param {string} [config.contentType] Just like if you provide this, the client + * must provide this HTTP header with this same value in its request, so to if + * this parameter is not provided here, the client must not provide any value + * for this HTTP header in its request. + * @param {object} [config.extensionHeaders] If these headers are used, the + * server will check to make sure that the client provides matching + * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} + * for the requirements of this feature, most notably: + * - The header name must be prefixed with `x-goog-` + * - The header name must be all lowercase + * + * Note: Multi-valued header passed as an array in the extensionHeaders + * object is converted into a string, delimited by `,` with + * no space. 
Requests made using the signed URL will need to + * delimit multi-valued headers using a single `,` as well, or + * else the server will report a mismatched signature. + * @param {object} [config.queryParams] Additional query parameters to include + * in the signed URL. + * @param {string} [config.promptSaveAs] The filename to prompt the user to + * save the file as when the signed url is accessed. This is ignored if + * `config.responseDisposition` is set. + * @param {string} [config.responseDisposition] The + * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the + * signed url. + * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value + * given is passed to `new Date()`. + * Note: Use for 'v4' only. + * @param {string} [config.responseType] The response-content-type parameter + * of the signed url. + * @param {GetSignedUrlCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to read from this URL. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL that allows temporary access to download your file. + * // Access will begin at accessibleAt and end at expires. + * //- + * const request = require('request'); + * + * const config = { + * action: 'read', + * expires: '03-17-2025', + * accessibleAt: '03-13-2025' + * }; + * + * file.getSignedUrl(config, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. + * request(url, function(err, resp) { + * // resp.statusCode = 200 + * }); + * }); + * + * //- + * // Generate a URL to allow write permissions. This means anyone with this + * URL + * // can send a POST request with new data that will overwrite the file. + * //- + * file.getSignedUrl({ + * action: 'write', + * expires: '03-17-2025' + * }, function(err, url) { + * if (err) { + * console.error(err); + * return; + * } + * + * // The file is now available to be written to. + * const writeStream = request.put(url); + * writeStream.end('New data'); + * + * writeStream.on('complete', function(resp) { + * // Confirm the new content was saved. + * file.download(function(err, fileContents) { + * console.log('Contents:', fileContents.toString()); + * // Contents: New data + * }); + * }); + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * file.getSignedUrl(config).then(function(data) { + * const url = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_generate_signed_url + * Another example: + */ + getSignedUrl(cfg, callback) { + const method = ActionToHTTPMethod[cfg.action]; + const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); + if (cfg.action === 'resumable') { + extensionHeaders['x-goog-resumable'] = 'start'; + } + const queryParams = Object.assign({}, cfg.queryParams); + if (typeof cfg.responseType === 'string') { + queryParams['response-content-type'] = cfg.responseType; + } + if (typeof cfg.promptSaveAs === 'string') { + queryParams['response-content-disposition'] = + 'attachment; filename="' + cfg.promptSaveAs + '"'; + } + if (typeof cfg.responseDisposition === 'string') { + queryParams['response-content-disposition'] = cfg.responseDisposition; + } + if (this.generation) { + queryParams['generation'] = this.generation.toString(); + } + const signConfig = { + method, + expires: cfg.expires, + accessibleAt: cfg.accessibleAt, + extensionHeaders, + queryParams, + contentMd5: cfg.contentMd5, + contentType: cfg.contentType, + host: cfg.host, + }; + if (cfg.cname) { + signConfig.cname = cfg.cname; + } + if (cfg.version) { + signConfig.version = cfg.version; + } + if (cfg.virtualHostedStyle) { + signConfig.virtualHostedStyle = cfg.virtualHostedStyle; + } + if (!this.signer) { + this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this, this.storage); + } + this.signer + .getSignedUrl(signConfig) + .then(signedUrl => callback(null, signedUrl), callback); + } + /** + * @callback IsPublicCallback + * @param {?Error} err Request error, if any. + * @param {boolean} resp Whether file is public or not. + */ + /** + * @typedef {array} IsPublicResponse + * @property {boolean} 0 Whether file is public or not. + */ + /** + * Check whether this file is public or not by sending + * a HEAD request without credentials. + * No errors from the server indicates that the current + * file is public. + * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} + * indicates that file is private. + * Any other non 403 error is propagated to user. + * + * @param {IsPublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Check whether the file is publicly accessible. + * //- + * file.isPublic(function(err, resp) { + * if (err) { + * console.error(err); + * return; + * } + * console.log(`the file ${file.id} is public: ${resp}`) ; + * }) + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.isPublic().then(function(data) { + * const resp = data[0]; + * }); + * ``` + */ + isPublic(callback) { + var _a; + // Build any custom headers based on the defined interceptors on the parent + // storage object and this object + const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? 
void 0 : _a.interceptors) || []; + const fileInterceptors = this.interceptors || []; + const allInterceptors = storageInterceptors.concat(fileInterceptors); + const headers = allInterceptors.reduce((acc, curInterceptor) => { + const currentHeaders = curInterceptor.request({ + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + }); + Object.assign(acc, currentHeaders.headers); + return acc; + }, {}); + index_js_1.util.makeRequest({ + method: 'GET', + uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, + headers, + }, { + retryOptions: this.storage.retryOptions, + }, (err) => { + if (err) { + const apiError = err; + if (apiError.code === 403) { + callback(null, false); + } + else { + callback(err); + } + } + else { + callback(null, true); + } + }); + } + /** + * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). + * @property {Metadata} [metadata] Define custom metadata properties to define + * along with the operation. + * @property {boolean} [strict] If true, set the file to be private to + * only the owner user. Otherwise, it will be private to the project. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback MakeFilePrivateCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} MakeFilePrivateResponse + * @property {object} 0 The full API response. + */ + /** + * Make a file private to the project and remove all other permissions. + * Set `options.strict` to true to make the file private to only the owner. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} + * + * @param {MakeFilePrivateOptions} [options] Configuration options. + * @param {MakeFilePrivateCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * //- + * // Set the file private so only project maintainers can see and modify it. + * //- + * file.makePrivate(function(err) {}); + * + * //- + * // Set the file private so only the owner can see and modify it. + * //- + * file.makePrivate({ strict: true }, function(err) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePrivate().then(function(data) { + * const apiResponse = data[0]; + * }); + * ``` + */ + makePrivate(optionsOrCallback, callback) { + var _a, _b; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const query = { + predefinedAcl: options.strict ? 'private' : 'projectPrivate', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + }; + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { + query.ifMetagenerationMatch = + (_b = options.preconditionOpts) === null || _b === void 0 ? 
void 0 : _b.ifMetagenerationMatch; + delete options.preconditionOpts; + } + if (options.userProject) { + query.userProject = options.userProject; + } + // You aren't allowed to set both predefinedAcl & acl properties on a file, + // so acl must explicitly be nullified, destroying all previous acls on the + // file. + const metadata = { ...options.metadata, acl: null }; + this.setMetadata(metadata, query, callback); + } + /** + * @typedef {array} MakeFilePublicResponse + * @property {object} 0 The full API response. + */ + /** + * @callback MakeFilePublicCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set a file to be publicly readable and maintain all previous permissions. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} + * + * @param {MakeFilePublicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * + * file.makePublic(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.makePublic().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_make_public + * Another example: + */ + makePublic(callback) { + callback = callback || index_js_1.util.noop; + this.acl.add({ + entity: 'allUsers', + role: 'READER', + }, (err, acl, resp) => { + callback(err, resp); + }); + } + /** + * The public URL of this File + * Use {@link File#makePublic} to enable anonymous access via the returned URL. + * + * @returns {string} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * // publicUrl will be "https://storage.googleapis.com/albums/my-file" + * const publicUrl = file.publicUrl(); + * ``` + */ + publicUrl() { + return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; + } + /** + * @typedef {array} MoveFileAtomicResponse + * @property {File} 0 The moved {@link File}. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveFileAtomicCallback + * @param {?Error} err Request error, if any. + * @param {File} movedFile The moved {@link File}. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveFileAtomicOptions Configuration options for File#moveFileAtomic(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + * @property {object} [preconditionOpts] Precondition options. + * @property {number} [preconditionOpts.ifGenerationMatch] Makes the operation conditional on whether the object's current generation matches the given value. + */ + /** + * Move this file within the same HNS-enabled bucket. + * The source object must exist and be a live object. + * The source and destination object IDs must be different. + * Overwriting the destination object is allowed by default, but can be prevented + * using preconditions. 
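`makePublic()` and `publicUrl()`, both implemented above, are often used together; a minimal promise-style sketch with a placeholder bucket and object name:

```
const {Storage} = require('@google-cloud/storage');

async function shareFile() {
  const file = new Storage().bucket('my-bucket').file('my-file');

  // Grant allUsers READER access, then build the public URL.
  await file.makePublic();
  return file.publicUrl(); // e.g. https://storage.googleapis.com/my-bucket/my-file
}
```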
+ * If the destination path includes non-existent parent folders, they will be created. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/move| Objects: move API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|File} destination Destination file name or File object within the same bucket.. + * @param {MoveFileAtomicOptions} [options] Configuration options. See an + * @param {MoveFileAtomicCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Assume 'my-hns-bucket' is an HNS-enabled bucket. + * //- + * const bucket = storage.bucket('my-hns-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is copied to its + * // current bucket, under the new name provided. + * //- + * file.moveFileAtomic('moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "moved-image.png" + * + * // `movedFile` is an instance of a File object that refers to your new + * // file. + * }); + * + * //- + * // Move the file to a subdirectory, creating parent folders if necessary. + * //- + * file.moveFileAtomic('new-folder/subfolder/moved-image.png', function(err, movedFile, apiResponse) { + * // `my-hns-bucket` now contains: + * // - "new-folder/subfolder/moved-image.png" + * }); + * + * //- + * // Prevent overwriting an existing destination object using preconditions. + * //- + * file.moveFileAtomic('existing-destination.png', { + * preconditionOpts: { + * ifGenerationMatch: 0 // Fails if the destination object exists. + * } + * }, function(err, movedFile, apiResponse) { + * if (err) { + * // Handle the error (e.g., the destination object already exists). + * } else { + * // Move successful. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.moveFileAtomic('moved-image.png).then(function(data) { + * const newFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file_hns + * Another example: + */ + moveFileAtomic(destination, optionsOrCallback, callback) { + var _a, _b; + const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); + if (!destination) { + throw noDestinationError; + } + let options = {}; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + else if (optionsOrCallback) { + options = { ...optionsOrCallback }; + } + callback = callback || index_js_1.util.noop; + let destName; + let newFile; + if (typeof destination === 'string') { + const parsedDestination = GS_URL_REGEXP.exec(destination); + if (parsedDestination !== null && parsedDestination.length === 3) { + destName = parsedDestination[2]; + } + else { + destName = destination; + } + } + else if (destination instanceof File) { + destName = destination.name; + newFile = destination; + } + else { + throw noDestinationError; + } + newFile = newFile || this.bucket.file(destName); + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? 
void 0 : options.preconditionOpts)) { + this.storage.retryOptions.autoRetry = false; + } + const query = {}; + if (options.userProject !== undefined) { + query.userProject = options.userProject; + delete options.userProject; + } + if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { + query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; + delete options.preconditionOpts; + } + this.request({ + method: 'POST', + uri: `/moveTo/o/${encodeURIComponent(newFile.name)}`, + qs: query, + json: options, + }, (err, resp) => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + if (err) { + callback(err, null, resp); + return; + } + callback(null, newFile, resp); + }); + } + /** + * @typedef {array} MoveResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback MoveCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} MoveOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Move this file to another location. By default, this will rename the file + * and keep it in the same bucket, but you can choose to move it to another + * Bucket by providing a Bucket or File object or a URL beginning with + * "gs://". + * + * **Warning**: + * There is currently no atomic `move` method in the Cloud Storage API, + * so this method is a composition of {@link File#copy} (to the new + * location) and {@link File#delete} (from the old location). While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} + * + * @throws {Error} If the destination file is not provided. + * + * @param {string|Bucket|File} destination Destination file. + * @param {MoveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * //- + * // You can pass in a variety of types for the destination. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // If you pass in a string for the destination, the file is moved to its + * // current bucket, under the new name provided. + * //- + * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a string starting with "gs://" for the destination, the + * // file is copied to the other bucket and under the new name provided. 
+ * //- + * const newLocation = 'gs://another-bucket/my-image-new.png'; + * file.move(newLocation, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image-new.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a Bucket object, the file will be moved to that bucket + * // using the same name. + * //- + * const anotherBucket = gcs.bucket('another-bucket'); + * + * file.move(anotherBucket, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-image.png" + * + * // `destinationFile` is an instance of a File object that refers to your + * // new file. + * }); + * + * //- + * // If you pass in a File object, you have complete control over the new + * // bucket and filename. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.move(anotherFile, function(err, destinationFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // + * // `another-bucket` now contains: + * // - "my-awesome-image.png" + * + * // Note: + * // The `destinationFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.move('my-image-new.png').then(function(data) { + * const destinationFile = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/files.js + * region_tag:storage_move_file + * Another example: + */ + move(destination, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.copy(destination, options, (err, destinationFile, copyApiResponse) => { + if (err) { + err.message = 'file#copy failed with an error - ' + err.message; + callback(err, null, copyApiResponse); + return; + } + if (this.name !== destinationFile.name || + this.bucket.name !== destinationFile.bucket.name) { + this.delete(options, (err, apiResponse) => { + if (err) { + err.message = 'file#delete failed with an error - ' + err.message; + callback(err, destinationFile, apiResponse); + return; + } + callback(null, destinationFile, copyApiResponse); + }); + } + else { + callback(null, destinationFile, copyApiResponse); + } + }); + } + /** + * @typedef {array} RenameResponse + * @property {File} 0 The destination File. + * @property {object} 1 The full API response. + */ + /** + * @callback RenameCallback + * @param {?Error} err Request error, if any. + * @param {?File} destinationFile The destination File. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {object} RenameOptions Configuration options for File#move(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * Rename this file. + * + * **Warning**: + * There is currently no atomic `rename` method in the Cloud Storage API, + * so this method is an alias of {@link File#move}, which in turn is a + * composition of {@link File#copy} (to the new location) and + * {@link File#delete} (from the old location). 
While + * unlikely, it is possible that an error returned to your callback could be + * triggered from either one of these API calls failing, which could leave a + * duplicate file lingering. The error message will indicate what operation + * has failed. + * + * @param {string|File} destinationFile Destination file. + * @param {RenameCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // You can pass in a string or a File object. + * // + * // For all of the below examples, assume we are working with the following + * // Bucket and File objects. + * //- + * + * const bucket = storage.bucket('my-bucket'); + * const file = bucket.file('my-image.png'); + * + * //- + * // You can pass in a string for the destinationFile. + * //- + * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * // but contains instead: + * // - "renamed-image.png" + * + * // `renamedFile` is an instance of a File object that refers to your + * // renamed file. + * }); + * + * //- + * // You can pass in a File object. + * //- + * const anotherFile = anotherBucket.file('my-awesome-image.png'); + * + * file.rename(anotherFile, function(err, renamedFile, apiResponse) { + * // `my-bucket` no longer contains: + * // - "my-image.png" + * + * // Note: + * // The `renamedFile` parameter is equal to `anotherFile`. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.rename('my-renamed-image.png').then(function(data) { + * const renamedFile = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + rename(destinationFile, optionsOrCallback, callback) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + callback = callback || index_js_1.util.noop; + this.move(destinationFile, options, callback); + } + /** + * @typedef {object} RestoreOptions Options for File#restore(). See an + * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. + * @param {string} [userProject] The ID of the project which will be + * billed for the request. + * @param {number} [generation] If present, selects a specific revision of this object. + * @param {string} [restoreToken] Returns an option that must be specified when getting a soft-deleted object from an HNS-enabled + * bucket that has a naming and generation conflict with another object in the same bucket. + * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. + * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. + * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource + * matches the value used in the precondition. + * If the values don't match, the request fails with a 412 Precondition Failed response. 
+ * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does + * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. + */ + /** + * Restores a soft-deleted file + * @param {RestoreOptions} options Restore options. + * @returns {Promise} + */ + async restore(options) { + const [file] = await this.request({ + method: 'POST', + uri: '/restore', + qs: options, + }); + return file; + } + /** + * Makes request and applies userProject query parameter if necessary. + * + * @private + * + * @param {object} reqOpts - The request options. + * @param {function} callback - The callback function. + */ + request(reqOpts, callback) { + return this.parent.request.call(this, reqOpts, callback); + } + /** + * @callback RotateEncryptionKeyCallback + * @extends CopyCallback + */ + /** + * @typedef RotateEncryptionKeyResponse + * @extends CopyResponse + */ + /** + * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options + * for File#rotateEncryptionKey(). + * If a string or Buffer is provided, it is interpreted as an AES-256, + * customer-supplied encryption key. If you'd like to use a Cloud KMS key + * name, you must specify an options object with the property name: + * `kmsKeyName`. + * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. + * @param {string} [options.kmsKeyName] A Cloud KMS key name. + */ + /** + * This method allows you to update the encryption key associated with this + * file. + * + * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} + * + * @param {RotateEncryptionKeyOptions} [options] - Configuration options. + * @param {RotateEncryptionKeyCallback} [callback] + * @returns {Promise} + * + * @example include:samples/encryption.js + * region_tag:storage_rotate_encryption_key + * Example of rotating the encryption key for this file: + */ + rotateEncryptionKey(optionsOrCallback, callback) { + var _a; + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + let options = {}; + if (typeof optionsOrCallback === 'string' || + optionsOrCallback instanceof Buffer) { + options = { + encryptionKey: optionsOrCallback, + }; + } + else if (typeof optionsOrCallback === 'object') { + options = optionsOrCallback; + } + const newFile = this.bucket.file(this.id, options); + const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined + ? { preconditionOpts: options.preconditionOpts } + : {}; + this.copy(newFile, copyOptions, callback); + } + /** + * @typedef {object} SaveOptions + * @extends CreateWriteStreamOptions + */ + /** + * @callback SaveCallback + * @param {?Error} err Request error, if any. + */ + /** + * Write strings or buffers to a file. + * + * *This is a convenience method which wraps {@link File#createWriteStream}.* + * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. + * + * Resumable uploads are automatically enabled and must be shut off explicitly + * by setting `options.resumable` to `false`. + * + * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. + * + *
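+ * Upload progress can be observed through the `onUploadProgress` option,
+ * which `save()` forwards to the underlying write stream's 'progress'
+ * event. A minimal sketch, assuming an existing bucket named `my-bucket`
+ * (the shape of the progress event comes from the underlying stream):
+ *
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const file = storage.bucket('my-bucket').file('report.csv');
+ *
+ * file.save(Buffer.from('a,b,c\n1,2,3\n'), {
+ *   // Forwarded to the write stream's 'progress' event.
+ *   onUploadProgress: evt => console.log(evt),
+ * }, err => {
+ *   if (!err) {
+ *     // File written successfully.
+ *   }
+ * });
+ * ```
+ *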
+ * There is some overhead when using a resumable upload that can cause + * noticeable performance degradation while uploading a series of small + * files. When uploading files less than 10MB, it is recommended that the + * resumable feature is disabled. + *
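+ * For small payloads the resumable machinery can be skipped entirely by
+ * setting `resumable: false`. A minimal sketch, assuming an existing
+ * bucket named `my-bucket`:
+ *
+ * ```
+ * const {Storage} = require('@google-cloud/storage');
+ * const storage = new Storage();
+ * const file = storage.bucket('my-bucket').file('notes.txt');
+ *
+ * // Small file: a single multipart upload avoids the resumable
+ * // session round trip described above.
+ * file.save('just a few bytes', {resumable: false}, err => {
+ *   if (!err) {
+ *     // File written successfully.
+ *   }
+ * });
+ * ```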
+ * + * @param {SaveData} data The data to write to a file. + * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` + * parameter. + * @param {SaveCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const file = myBucket.file('my-file'); + * const contents = 'This is the contents of the file.'; + * + * file.save(contents, function(err) { + * if (!err) { + * // File written successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.save(contents).then(function() {}); + * ``` + */ + save(data, optionsOrCallback, callback) { + // tslint:enable:no-any + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + let maxRetries = this.storage.retryOptions.maxRetries; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { + maxRetries = 0; + } + const returnValue = (0, async_retry_1.default)(async (bail) => { + return new Promise((resolve, reject) => { + if (maxRetries === 0) { + this.storage.retryOptions.autoRetry = false; + } + const writable = this.createWriteStream(options); + if (options.onUploadProgress) { + writable.on('progress', options.onUploadProgress); + } + const handleError = (err) => { + if (this.storage.retryOptions.autoRetry && + this.storage.retryOptions.retryableErrorFn(err)) { + return reject(err); + } + return bail(err); + }; + if (typeof data === 'string' || + Buffer.isBuffer(data) || + data instanceof Uint8Array) { + writable + .on('error', handleError) + .on('finish', () => resolve()) + .end(data); + } + else { + (0, stream_1.pipeline)(data, writable, err => { + if (err) { + if (typeof data !== 'function') { + // Only PipelineSourceFunction can be retried. Async-iterables + // and Readable streams can only be consumed once. + return bail(err); + } + handleError(err); + } + else { + resolve(); + } + }); + } + }); + }, { + retries: maxRetries, + factor: this.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + if (!callback) { + return returnValue; + } + else { + return returnValue + .then(() => { + if (callback) { + return callback(); + } + }) + .catch(callback); + } + } + setMetadata(metadata, optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } + /** + * @typedef {array} SetStorageClassResponse + * @property {object} 0 The full API response. + */ + /** + * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). 
+ * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @callback SetStorageClassCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Set the storage class for this file. + * + * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} storageClass The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`) + * **Note:** The storage classes `multi_regional` and `regional` + * are now legacy and will be deprecated in the future. + * @param {SetStorageClassOptions} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {SetStorageClassCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * file.setStorageClass('nearline', function(err, apiResponse) { + * if (err) { + * // Error handling omitted. + * } + * + * // The storage class was updated successfully. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * file.setStorageClass('nearline').then(function() {}); + * ``` + */ + setStorageClass(storageClass, optionsOrCallback, callback) { + callback = + typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const req = { + ...options, + // In case we get input like `storageClass`, convert to `storage_class`. + storageClass: storageClass + .replace(/-/g, '_') + .replace(/([a-z])([A-Z])/g, (_, low, up) => { + return low + '_' + up; + }) + .toUpperCase(), + }; + this.copy(this, req, (err, file, apiResponse) => { + if (err) { + callback(err, apiResponse); + return; + } + this.metadata = file.metadata; + callback(null, apiResponse); + }); + } + /** + * Set a user project to be billed for all requests made from this File + * object. + * + * @param {string} userProject The user project. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('albums'); + * const file = bucket.file('my-file'); + * + * file.setUserProject('grape-spaceship-123'); + * ``` + */ + setUserProject(userProject) { + this.bucket.setUserProject.call(this, userProject); + } + /** + * This creates a resumable-upload upload stream. + * + * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startResumableUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const retryOptions = this.storage.retryOptions; + if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { + retryOptions.autoRetry = false; + } + const cfg = { + authClient: this.storage.authClient, + apiEndpoint: this.storage.apiEndpoint, + bucket: this.bucket.name, + customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), + file: this.name, + generation: this.generation, + isPartialUpload: options.isPartialUpload, + key: this.encryptionKey, + kmsKeyName: this.kmsKeyName, + metadata: options.metadata, + offset: options.offset, + predefinedAcl: options.predefinedAcl, + private: options.private, + public: options.public, + uri: options.uri, + userProject: options.userProject || this.userProject, + retryOptions: { ...retryOptions }, + params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, + chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, + highWaterMark: options === null || options === void 0 ? void 0 : options.highWaterMark, + universeDomain: this.bucket.storage.universeDomain, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + let uploadStream; + try { + uploadStream = resumableUpload.upload(cfg); + } + catch (error) { + dup.destroy(error); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + return; + } + uploadStream + .on('response', resp => { + dup.emit('response', resp); + }) + .on('uri', uri => { + dup.emit('uri', uri); + }) + .on('metadata', metadata => { + this.metadata = metadata; + dup.emit('metadata'); + }) + .on('finish', () => { + dup.emit('complete'); + }) + .on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(uploadStream); + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + } + /** + * Takes a readable stream and pipes it to a remote file. Unlike + * `startResumableUpload_`, which uses the resumable upload technique, this + * method uses a simple upload (all or nothing). + * + * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. + * @param {object=} options - Configuration object. + * + * @private + */ + startSimpleUpload_(dup, options = {}) { + var _a; + (_a = options.metadata) !== null && _a !== void 0 ? 
_a : (options.metadata = {}); + const apiEndpoint = this.storage.apiEndpoint; + const bucketName = this.bucket.name; + const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; + const reqOpts = { + qs: { + name: this.name, + }, + uri: uri, + [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], + }; + if (this.generation !== undefined) { + reqOpts.qs.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName !== undefined) { + reqOpts.qs.kmsKeyName = this.kmsKeyName; + } + if (typeof options.timeout === 'number') { + reqOpts.timeout = options.timeout; + } + if (options.userProject || this.userProject) { + reqOpts.qs.userProject = options.userProject || this.userProject; + } + if (options.predefinedAcl) { + reqOpts.qs.predefinedAcl = options.predefinedAcl; + } + else if (options.private) { + reqOpts.qs.predefinedAcl = 'private'; + } + else if (options.public) { + reqOpts.qs.predefinedAcl = 'publicRead'; + } + Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); + index_js_1.util.makeWritableStream(dup, { + makeAuthenticatedRequest: (reqOpts) => { + this.request(reqOpts, (err, body, resp) => { + if (err) { + dup.destroy(err); + return; + } + this.metadata = body; + dup.emit('metadata', body); + dup.emit('response', resp); + dup.emit('complete'); + }); + }, + metadata: options.metadata, + request: reqOpts, + }); + } + disableAutoRetryConditionallyIdempotent_( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + coreOpts, methodType, localPreconditionOptions) { + var _a, _b, _c, _d; + if ((typeof coreOpts === 'object' && + ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.delete && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + if ((typeof coreOpts === 'object' && + ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? void 0 : _d.ifMetagenerationMatch) === undefined && + (localPreconditionOptions === null || localPreconditionOptions === void 0 ? 
void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && + methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryConditional) || + this.storage.retryOptions.idempotencyStrategy === + storage_js_1.IdempotencyStrategy.RetryNever) { + this.storage.retryOptions.autoRetry = false; + } + } + async getBufferFromReadable(readable) { + const buf = []; + for await (const chunk of readable) { + buf.push(chunk); + } + return Buffer.concat(buf); + } +} +exports.File = File; +_File_instances = new WeakSet(), _File_validateIntegrity = +/** + * + * @param hashCalculatingStream + * @param verify + * @returns {boolean} Returns `true` if valid, throws with error otherwise + */ +async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { + const metadata = this.metadata; + // If we're doing validation, assume the worst + let dataMismatch = !!(verify.crc32c || verify.md5); + if (verify.crc32c && metadata.crc32c) { + dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); + } + if (verify.md5 && metadata.md5Hash) { + dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); + } + if (dataMismatch) { + const errors = []; + let code = ''; + let message = ''; + try { + await this.delete(); + if (verify.md5 && !metadata.md5Hash) { + code = 'MD5_NOT_AVAILABLE'; + message = FileExceptionMessages.MD5_NOT_AVAILABLE; + } + else { + code = 'FILE_NO_UPLOAD'; + message = FileExceptionMessages.UPLOAD_MISMATCH; + } + } + catch (e) { + const error = e; + code = 'FILE_NO_UPLOAD_DELETE'; + message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; + errors.push(error); + } + const error = new RequestError(message); + error.code = code; + error.errors = errors; + throw error; + } + return true; +}; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(File, { + exclude: [ + 'cloudStorageURI', + 'publicUrl', + 'request', + 'save', + 'setEncryptionKey', + 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', + 'getBufferFromReadable', + 'restore', + ], +}); + + +/***/ }), + +/***/ 4873: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HashStreamValidator = void 0; +const crypto_1 = __nccwpck_require__(6982); +const stream_1 = __nccwpck_require__(2203); +const crc32c_js_1 = __nccwpck_require__(4317); +const file_js_1 = __nccwpck_require__(9975); +class HashStreamValidator extends stream_1.Transform { + constructor(options = {}) { + super(); + this.updateHashesOnly = false; + _HashStreamValidator_crc32cHash.set(this, undefined); + _HashStreamValidator_md5Hash.set(this, undefined); + _HashStreamValidator_md5Digest.set(this, ''); + this.crc32cEnabled = !!options.crc32c; + this.md5Enabled = !!options.md5; + this.updateHashesOnly = !!options.updateHashesOnly; + this.crc32cExpected = options.crc32cExpected; + this.md5Expected = options.md5Expected; + if (this.crc32cEnabled) { + if (options.crc32cInstance) { + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); + } + else { + const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); + } + } + if (this.md5Enabled) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); + } + } + /** + * Return the current CRC32C value, if available. + */ + get crc32c() { + var _a; + return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); + } + _flush(callback) { + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); + } + if (this.updateHashesOnly) { + callback(); + return; + } + // If we're doing validation, assume the worst-- a data integrity + // mismatch. If not, these tests won't be performed, and we can assume + // the best. + // We must check if the server decompressed the data on serve because hash + // validation is not possible in this case. 
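+ // Note on the checks below: `failed` starts out true whenever a hash
+ // was requested and is only cleared by a matching test() against an
+ // expected value; if both CRC32C and MD5 are enabled with expected
+ // values, the MD5 comparison runs last and decides the outcome.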
+ let failed = this.crc32cEnabled || this.md5Enabled; + if (this.crc32cEnabled && this.crc32cExpected) { + failed = !this.test('crc32c', this.crc32cExpected); + } + if (this.md5Enabled && this.md5Expected) { + failed = !this.test('md5', this.md5Expected); + } + if (failed) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + callback(mismatchError); + } + else { + callback(); + } + } + _transform(chunk, encoding, callback) { + this.push(chunk, encoding); + try { + if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); + if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) + __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); + callback(); + } + catch (e) { + callback(e); + } + } + test(hash, sum) { + const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; + if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); + } + if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { + return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; + } + return false; + } +} +exports.HashStreamValidator = HashStreamValidator; +_HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); + + +/***/ }), + +/***/ 5891: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HmacKey = void 0; +const index_js_1 = __nccwpck_require__(1325); +const storage_js_1 = __nccwpck_require__(2848); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the HMAC key. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name HmacKey#metadata + * @type {object} + */ +/** + * An HmacKey object contains metadata of an HMAC key created from a + * service account through the {@link Storage} client using + * {@link Storage#createHmacKey}. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @class + */ +class HmacKey extends index_js_1.ServiceObject { + /** + * @typedef {object} HmacKeyOptions + * @property {string} [projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + */ + /** + * Constructs an HmacKey object. 
+ * + * Note: this only create a local reference to an HMAC key, to create + * an HMAC key, use {@link Storage#createHmacKey}. + * + * @param {Storage} storage The Storage instance this HMAC key is + * attached to. + * @param {string} accessId The unique accessId for this HMAC key. + * @param {HmacKeyOptions} options Constructor configurations. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('access-id'); + * ``` + */ + constructor(storage, accessId, options) { + const methods = { + /** + * @typedef {object} DeleteHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {array} DeleteHmacKeyResponse + * @property {object} 0 The full API response. + */ + /** + * @callback DeleteHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {object} apiResponse The full API response. + */ + /** + * Deletes an HMAC key. + * Key state must be set to `INACTIVE` prior to deletion. + * Caution: HMAC keys cannot be recovered once you delete them. + * + * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. + * + * @method HmacKey#delete + * @param {DeleteHmacKeyOptions} [options] Configuration options. + * @param {DeleteHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Delete HMAC key after making the key inactive. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * hmacKey.delete((err) => { + * if (err) { + * console.error(err); + * return; + * } + * // The HMAC key is deleted. + * }); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * hmacKey + * .setMetadata({state: 'INACTIVE'}) + * .then(() => { + * return hmacKey.delete(); + * }); + * ``` + */ + delete: true, + /** + * @callback GetHmacKeyCallback + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey this {@link HmacKey} instance. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} GetHmacKeyResponse + * @property {HmacKey} 0 This {@link HmacKey} instance. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} GetHmacKeyOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * this {@link HmacKey} instance. + * + * HmacKey.get() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#get + * @param {GetHmacKeyOptions} [options] Configuration options. + * @param {GetHmacKeyCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's Metadata. + * //- + * storage.hmacKey('ACCESS_ID') + * .get((err, hmacKey) => { + * if (err) { + * // The request was an error. 
+ * console.error(err); + * return; + * } + * // do something with the returned HmacKey object. + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .get() + * .then((data) => { + * const hmacKey = data[0]; + * }); + * ``` + */ + get: true, + /** + * @typedef {object} GetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * Retrieves and populate an HMAC key's metadata, and return + * the HMAC key's metadata as an object. + * + * HmacKey.getMetadata() does not give the HMAC key secret, as + * it is only returned on creation. + * + * The authenticated user must have `storage.hmacKeys.get` permission + * for the project in which the key exists. + * + * @method HmacKey#getMetadata + * @param {GetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * //- + * // Get the HmacKey's metadata and populate to the metadata property. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata((err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. + * //- + * storage.hmacKey('ACCESS_ID') + * .getMetadata() + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + getMetadata: true, + /** + * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. + * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. + * @property {string} [etag] Include an etag from a previous get HMAC key request + * to perform safe read-modify-write. + */ + /** + * @typedef {object} SetHmacKeyMetadataOptions + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @callback HmacKeyMetadataCallback + * @param {?Error} err Request error, if any. + * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. + * @param {object} apiResponse The full API response. + */ + /** + * @typedef {array} HmacKeyMetadataResponse + * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. + * @property {object} 1 The full API response. + */ + /** + * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for + * valid states. + * + * @method HmacKey#setMetadata + * @param {SetHmacKeyMetadata} metadata The new metadata. + * @param {SetHmacKeyMetadataOptions} [options] Configuration options. + * @param {HmacKeyMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * const metadata = { + * state: 'INACTIVE', + * }; + * + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata, (err, hmacKeyMetadata) => { + * if (err) { + * // The request was an error. + * console.error(err); + * return; + * } + * console.log(hmacKeyMetadata); + * }); + * + * //- + * // If the callback is omitted, a promise is returned. 
+ * //- + * storage.hmacKey('ACCESS_ID') + * .setMetadata(metadata) + * .then((data) => { + * const hmacKeyMetadata = data[0]; + * console.log(hmacKeyMetadata); + * }); + * ``` + */ + setMetadata: { + reqOpts: { + method: 'PUT', + }, + }, + }; + const projectId = (options && options.projectId) || storage.projectId; + super({ + parent: storage, + id: accessId, + baseUrl: `/projects/${projectId}/hmacKeys`, + methods, + }); + this.storage = storage; + this.instanceRetryValue = storage.retryOptions.autoRetry; + } + setMetadata(metadata, optionsOrCallback, cb) { + // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways + if (this.storage.retryOptions.idempotencyStrategy !== + storage_js_1.IdempotencyStrategy.RetryAlways) { + this.storage.retryOptions.autoRetry = false; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + cb = + typeof optionsOrCallback === 'function' + ? optionsOrCallback + : cb; + super + .setMetadata(metadata, options) + .then(resp => cb(null, ...resp)) + .catch(cb) + .finally(() => { + this.storage.retryOptions.autoRetry = this.instanceRetryValue; + }); + } +} +exports.HmacKey = HmacKey; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(HmacKey); + + +/***/ }), + +/***/ 2880: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Iam = exports.IAMExceptionMessages = void 0; +const promisify_1 = __nccwpck_require__(206); +const util_js_1 = __nccwpck_require__(4557); +var IAMExceptionMessages; +(function (IAMExceptionMessages) { + IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; + IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; +})(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); +/** + * Get and set IAM policies for your Cloud Storage bucket. + * + * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} + * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @constructor Iam + * + * @param {Bucket} bucket The parent instance. + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * // bucket.iam + * ``` + */ +class Iam { + constructor(bucket) { + this.request_ = bucket.request.bind(bucket); + this.resourceId_ = 'buckets/' + bucket.getId(); + } + /** + * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). 
+ * @property {number} [requestedPolicyVersion] The version of IAM policies to + * request. If a policy with a condition is requested without setting + * this, the server will return an error. This must be set to a value + * of 3 to retrieve IAM policies containing conditions. This is to + * prevent client code that isn't aware of IAM conditions from + * interpreting and modifying policies incorrectly. The service might + * return a policy with version lower than the one that was requested, + * based on the feature syntax in the policy fetched. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + * @property {string} [userProject] The ID of the project which will be + * billed for the request. + */ + /** + * @typedef {array} GetPolicyResponse + * @property {Policy} 0 The policy. + * @property {object} 1 The full API response. + */ + /** + * @typedef {object} Policy + * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. + * @property {string} [policy.etag] Etags are used to perform a read-modify-write. + * @property {number} [policy.version] The syntax schema version of the Policy. + * To set an IAM policy with conditional binding, this field must be set to + * 3 or greater. + * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} + */ + /** + * @typedef {object} PolicyBinding + * @property {string} role Role that is assigned to members. + * @property {string[]} members Specifies the identities requesting access for the bucket. + * @property {Expr} [condition] The condition that is associated with this binding. + */ + /** + * @typedef {object} Expr + * @property {string} [title] An optional title for the expression, i.e. a + * short string describing its purpose. This can be used e.g. in UIs + * which allow to enter the expression. + * @property {string} [description] An optional description of the + * expression. This is a longer text which describes the expression, + * e.g. when hovered over it in a UI. + * @property {string} expression Textual representation of an expression in + * Common Expression Language syntax. The application context of the + * containing message determines which well-known feature set of CEL + * is supported.The condition that is associated with this binding. + * + * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions + */ + /** + * Get the IAM policy. + * + * @param {GetPolicyOptions} [options] Request options. + * @param {GetPolicyCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: setIamPolicy API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * bucket.iam.getPolicy( + * {requestedPolicyVersion: 3}, + * function(err, policy, apiResponse) { + * + * }, + * ); + * + * //- + * // If the callback is omitted, we'll return a Promise. 
+ * //- + * bucket.iam.getPolicy({requestedPolicyVersion: 3}) + * .then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_view_bucket_iam_members + * Example of retrieving a bucket's IAM policy: + */ + getPolicy(optionsOrCallback, callback) { + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const qs = {}; + if (options.userProject) { + qs.userProject = options.userProject; + } + if (options.requestedPolicyVersion !== null && + options.requestedPolicyVersion !== undefined) { + qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; + } + this.request_({ + uri: '/iam', + qs, + }, cb); + } + /** + * Set the IAM policy. + * + * @throws {Error} If no policy is provided. + * + * @param {Policy} policy The policy. + * @param {SetPolicyOptions} [options] Configuration options. + * @param {SetPolicyCallback} callback Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} + * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * const myPolicy = { + * bindings: [ + * { + * role: 'roles/storage.admin', + * members: + * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] + * } + * ] + * }; + * + * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.setPolicy(myPolicy).then(function(data) { + * const policy = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/iam.js + * region_tag:storage_add_bucket_iam_member + * Example of adding to a bucket's IAM policy: + * + * @example include:samples/iam.js + * region_tag:storage_remove_bucket_iam_member + * Example of removing from a bucket's IAM policy: + */ + setPolicy(policy, optionsOrCallback, callback) { + if (policy === null || typeof policy !== 'object') { + throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + let maxRetries; + if (policy.etag === undefined) { + maxRetries = 0; + } + this.request_({ + method: 'PUT', + uri: '/iam', + maxRetries, + json: Object.assign({ + resourceId: this.resourceId_, + }, policy), + qs: options, + }, cb); + } + /** + * Test a set of permissions for a resource. + * + * @throws {Error} If permissions are not provided. + * + * @param {string|string[]} permissions The permission(s) to test for. + * @param {TestIamPermissionsOptions} [options] Configuration object. + * @param {TestIamPermissionsCallback} [callback] Callback function. + * @returns {Promise} + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * + * //- + * // Test a single permission. 
+ * //- + * const test = 'storage.buckets.delete'; + * + * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": true + * // } + * }); + * + * //- + * // Test several permissions at once. + * //- + * const tests = [ + * 'storage.buckets.delete', + * 'storage.buckets.get' + * ]; + * + * bucket.iam.testPermissions(tests, function(err, permissions) { + * console.log(permissions); + * // { + * // "storage.buckets.delete": false, + * // "storage.buckets.get": true + * // } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * bucket.iam.testPermissions(test).then(function(data) { + * const permissions = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + testPermissions(permissions, optionsOrCallback, callback) { + if (!Array.isArray(permissions) && typeof permissions !== 'string') { + throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); + } + const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); + const permissionsArray = Array.isArray(permissions) + ? permissions + : [permissions]; + const req = Object.assign({ + permissions: permissionsArray, + }, options); + this.request_({ + uri: '/iam/testPermissions', + qs: req, + useQuerystring: true, + }, (err, resp) => { + if (err) { + cb(err, null, resp); + return; + } + const availablePermissions = Array.isArray(resp.permissions) + ? resp.permissions + : []; + const permissionsHash = permissionsArray.reduce((acc, permission) => { + acc[permission] = availablePermissions.indexOf(permission) > -1; + return acc; + }, {}); + cb(null, permissionsHash, resp); + }); + } +} +exports.Iam = Iam; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Iam); + + +/***/ }), + +/***/ 8525: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.IdempotencyStrategy = exports.ApiError = void 0; +/** + * The `@google-cloud/storage` package has a single named export which is the + * {@link Storage} (ES6) class, which should be instantiated with `new`. + * + * See {@link Storage} and {@link ClientConfig} for client methods and + * configuration options. + * + * @module {Storage} @google-cloud/storage + * @alias nodejs-storage + * + * @example + * Install the client library with npm: + * ``` + * npm install --save @google-cloud/storage + * ``` + * + * @example + * Import the client library + * ``` + * const {Storage} = require('@google-cloud/storage'); + * ``` + * + * @example + * Create a client that uses Application + * Default Credentials (ADC): + * ``` + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit + * credentials: + * ``` + * const storage = new Storage({ projectId: + * 'your-project-id', keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example include:samples/quickstart.js + * region_tag:storage_quickstart + * Full quickstart example: + */ +var index_js_1 = __nccwpck_require__(1325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); +var storage_js_1 = __nccwpck_require__(2848); +Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); +Object.defineProperty(exports, "RETRYABLE_ERR_FN_DEFAULT", ({ enumerable: true, get: function () { return storage_js_1.RETRYABLE_ERR_FN_DEFAULT; } })); +Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); +var bucket_js_1 = __nccwpck_require__(2887); +Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); +__exportStar(__nccwpck_require__(4317), exports); +var channel_js_1 = __nccwpck_require__(2658); +Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); +var file_js_1 = __nccwpck_require__(9975); +Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); +__exportStar(__nccwpck_require__(4873), exports); +var hmacKey_js_1 = __nccwpck_require__(5891); +Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); +var iam_js_1 = __nccwpck_require__(2880); +Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); +var notification_js_1 = __nccwpck_require__(8598); +Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); +__exportStar(__nccwpck_require__(4), exports); + + +/***/ }), + +/***/ 1325: +/***/ 
((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; +var service_js_1 = __nccwpck_require__(8542); +Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); +var service_object_js_1 = __nccwpck_require__(2908); +Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); +var util_js_1 = __nccwpck_require__(7325); +Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); +Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); + + +/***/ }), + +/***/ 2908: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ServiceObject = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const promisify_1 = __nccwpck_require__(206); +const events_1 = __nccwpck_require__(4434); +const util_js_1 = __nccwpck_require__(7325); +/** + * ServiceObject is a base class, meant to be inherited from by a "service + * object," like a BigQuery dataset or Storage bucket. + * + * Most of the time, these objects share common functionality; they can be + * created or deleted, and you can get or set their metadata. + * + * By inheriting from this class, a service object will be extended with these + * shared behaviors. Note that any method can be overridden when the service + * object requires specific behavior. + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class ServiceObject extends events_1.EventEmitter { + /* + * @constructor + * @alias module:common/service-object + * + * @private + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string} config.createMethod - The method which creates this object. + * @param {string=} config.id - The identifier of the object. For example, the + * name of a Storage bucket or Pub/Sub topic. + * @param {object=} config.methods - A map of each method name that should be inherited. + * @param {object} config.methods[].reqOpts - Default request options for this + * particular method. A common use case is when `setMetadata` requires a + * `PUT` method to override the default `PATCH`. + * @param {object} config.parent - The parent service instance. For example, an + * instance of Storage if the object is Bucket. + */ + constructor(config) { + super(); + this.metadata = {}; + this.baseUrl = config.baseUrl; + this.parent = config.parent; // Parent class. + this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). 
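+ // The remaining config values are copied straight onto the instance.
+ // `config.methods` (handled below) also acts as an allow-list: any
+ // ServiceObject prototype method not listed there is stripped from
+ // this instance, except `request*` and `getRequestInterceptors`.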
+ this.createMethod = config.createMethod; + this.methods = config.methods || {}; + this.interceptors = []; + this.projectId = config.projectId; + if (config.methods) { + // This filters the ServiceObject instance (e.g. a "File") to only have + // the configured methods. We make a couple of exceptions for core- + // functionality ("request()" and "getRequestInterceptors()") + Object.getOwnPropertyNames(ServiceObject.prototype) + .filter(methodName => { + return ( + // All ServiceObjects need `request` and `getRequestInterceptors`. + // clang-format off + !/^request/.test(methodName) && + !/^getRequestInterceptors/.test(methodName) && + // clang-format on + // The ServiceObject didn't redefine the method. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] === + // eslint-disable-next-line @typescript-eslint/no-explicit-any + ServiceObject.prototype[methodName] && + // This method isn't wanted. + !config.methods[methodName]); + }) + .forEach(methodName => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this[methodName] = undefined; + }); + } + } + create(optionsOrCallback, callback) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const args = [this.id]; + if (typeof optionsOrCallback === 'function') { + callback = optionsOrCallback; + } + if (typeof optionsOrCallback === 'object') { + args.push(optionsOrCallback); + } + // Wrap the callback to return *this* instance of the object, not the + // newly-created one. + // tslint: disable-next-line no-any + function onCreate(...args) { + const [err, instance] = args; + if (!err) { + self.metadata = instance.metadata; + if (self.id && instance.metadata) { + self.id = instance.metadata.id; + } + args[1] = self; // replace the created `instance` with this one. + } + callback(...args); + } + args.push(onCreate); + // eslint-disable-next-line prefer-spread + this.createMethod.apply(null, args); + } + delete(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const ignoreNotFound = options.ignoreNotFound; + delete options.ignoreNotFound; + const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; + const reqOpts = { + method: 'DELETE', + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + if (err) { + if (err.code === 404 && ignoreNotFound) { + err = null; + } + } + callback(err, res); + }); + } + exists(optionsOrCallback, cb) { + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + this.get(options, err => { + if (err) { + if (err.code === 404) { + callback(null, false); + } + else { + callback(err); + } + return; + } + callback(null, true); + }); + } + get(optionsOrCallback, cb) { + // eslint-disable-next-line @typescript-eslint/no-this-alias + const self = this; + const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const options = Object.assign({}, opts); + const autoCreate = options.autoCreate && typeof this.create === 'function'; + delete options.autoCreate; + function onCreate(err, instance, apiResponse) { + if (err) { + if (err.code === 409) { + self.get(options, callback); + return; + } + callback(err, null, apiResponse); + return; + } + callback(null, instance, apiResponse); + } + this.getMetadata(options, (err, metadata) => { + if (err) { + if (err.code === 404 && autoCreate) { + const args = []; + if (Object.keys(options).length > 0) { + args.push(options); + } + args.push(onCreate); + self.create(...args); + return; + } + callback(err, null, metadata); + return; + } + callback(null, self, metadata); + }); + } + getMetadata(optionsOrCallback, cb) { + var _a; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.getMetadata === 'object' && + this.methods.getMetadata) || + {}; + const reqOpts = { + uri: '', + ...methodConfig.reqOpts, + qs: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. + ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. + const localInterceptors = this.interceptors + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + return this.parent.getRequestInterceptors().concat(localInterceptors); + } + setMetadata(metadata, optionsOrCallback, cb) { + var _a, _b; + const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); + const methodConfig = (typeof this.methods.setMetadata === 'object' && + this.methods.setMetadata) || + {}; + const reqOpts = { + method: 'PATCH', + uri: '', + ...methodConfig.reqOpts, + json: { + ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, + ...metadata, + }, + qs: { + ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, + ...options, + }, + }; + // The `request` method may have been overridden to hold any special + // behavior. Ensure we call the original `request` method. 
+ ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { + this.metadata = body; + callback(err, this.metadata, res); + }); + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts }; + if (this.projectId) { + reqOpts.projectId = this.projectId; + } + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .filter(x => x.trim()) // Limit to non-empty strings. + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/'); + const childInterceptors = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + const localInterceptors = [].slice.call(this.interceptors); + reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); + if (reqOpts.shouldReturnStream) { + return this.parent.requestStream(reqOpts); + } + this.parent.request(reqOpts, callback); + } + request(reqOpts, callback) { + this.request_(reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return this.request_(opts); + } +} +exports.ServiceObject = ServiceObject; +(0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); + + +/***/ }), + +/***/ 8542: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +const google_auth_library_1 = __nccwpck_require__(492); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; +class Service { + /** + * Service is a base class, meant to be inherited from by a "service," like + * BigQuery or Storage. + * + * This handles making authenticated requests by exposing a `makeReq_` + * function. + * + * @constructor + * @alias module:common/service + * + * @param {object} config - Configuration object. + * @param {string} config.baseUrl - The base URL to make API requests to. + * @param {string[]} config.scopes - The scopes required for the request. + * @param {object=} options - [Configuration object](#/docs). + */ + constructor(config, options = {}) { + this.baseUrl = config.baseUrl; + this.apiEndpoint = config.apiEndpoint; + this.timeout = options.timeout; + this.globalInterceptors = Array.isArray(options.interceptors_) + ? options.interceptors_ + : []; + this.interceptors = []; + this.packageJson = config.packageJson; + this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; + this.projectIdRequired = config.projectIdRequired !== false; + this.providedUserAgent = options.userAgent; + this.universeDomain = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.customEndpoint = config.customEndpoint || false; + this.makeAuthenticatedRequest = util_js_1.util.makeAuthenticatedRequestFactory({ + ...config, + projectIdRequired: this.projectIdRequired, + projectId: this.projectId, + authClient: options.authClient || config.authClient, + credentials: options.credentials, + keyFile: options.keyFilename, + email: options.email, + clientOptions: { + universeDomain: options.universeDomain, + ...options.clientOptions, + }, + }); + this.authClient = this.makeAuthenticatedRequest.authClient; + const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; + if (isCloudFunctionEnv) { + this.interceptors.push({ + request(reqOpts) { + reqOpts.forever = false; + return reqOpts; + }, + }); + } + } + /** + * Return the user's custom request interceptors. + */ + getRequestInterceptors() { + // Interceptors should be returned in the order they were assigned. 
+ return [].slice + .call(this.globalInterceptors) + .concat(this.interceptors) + .filter(interceptor => typeof interceptor.request === 'function') + .map(interceptor => interceptor.request); + } + getProjectId(callback) { + if (!callback) { + return this.getProjectIdAsync(); + } + this.getProjectIdAsync().then(p => callback(null, p), callback); + } + async getProjectIdAsync() { + const projectId = await this.authClient.getProjectId(); + if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { + this.projectId = projectId; + } + return this.projectId; + } + request_(reqOpts, callback) { + reqOpts = { ...reqOpts, timeout: this.timeout }; + const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; + const uriComponents = [this.baseUrl]; + if (this.projectIdRequired) { + if (reqOpts.projectId) { + uriComponents.push('projects'); + uriComponents.push(reqOpts.projectId); + } + else { + uriComponents.push('projects'); + uriComponents.push(this.projectId); + } + } + uriComponents.push(reqOpts.uri); + if (isAbsoluteUrl) { + uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); + } + reqOpts.uri = uriComponents + .map(uriComponent => { + const trimSlashesRegex = /^\/*|\/*$/g; + return uriComponent.replace(trimSlashesRegex, ''); + }) + .join('/') + // Some URIs have colon separators. + // Bad: https://.../projects/:list + // Good: https://.../projects:list + .replace(/\/:/g, ':'); + const requestInterceptors = this.getRequestInterceptors(); + const interceptorArray = Array.isArray(reqOpts.interceptors_) + ? reqOpts.interceptors_ + : []; + interceptorArray.forEach(interceptor => { + if (typeof interceptor.request === 'function') { + requestInterceptors.push(interceptor.request); + } + }); + requestInterceptors.forEach(requestInterceptor => { + reqOpts = requestInterceptor(reqOpts); + }); + delete reqOpts.interceptors_; + const pkg = this.packageJson; + let userAgent = (0, util_js_2.getUserAgentString)(); + if (this.providedUserAgent) { + userAgent = `${this.providedUserAgent} ${userAgent}`; + } + reqOpts.headers = { + ...reqOpts.headers, + 'User-Agent': userAgent, + 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { + reqOpts.headers['x-goog-api-client'] += + ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; + } + if (reqOpts.shouldReturnStream) { + return this.makeAuthenticatedRequest(reqOpts); + } + else { + this.makeAuthenticatedRequest(reqOpts, callback); + } + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + * @param {function} callback - The callback function passed to `request`. + */ + request(reqOpts, callback) { + Service.prototype.request_.call(this, reqOpts, callback); + } + /** + * Make an authenticated API request. + * + * @param {object} reqOpts - Request options that are passed to `request`. + * @param {string} reqOpts.uri - A URI relative to the baseUrl. + */ + requestStream(reqOpts) { + const opts = { ...reqOpts, shouldReturnStream: true }; + return Service.prototype.request_.call(this, opts); + } +} +exports.Service = Service; + + +/***/ }), + +/***/ 7325: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; +/*! + * @module common/util + */ +const projectify_1 = __nccwpck_require__(7635); +const htmlEntities = __importStar(__nccwpck_require__(3660)); +const google_auth_library_1 = __nccwpck_require__(492); +const retry_request_1 = __importDefault(__nccwpck_require__(7842)); +const stream_1 = __nccwpck_require__(2203); +const teeny_request_1 = __nccwpck_require__(321); +const uuid = __importStar(__nccwpck_require__(2048)); +const service_js_1 = __nccwpck_require__(8542); +const util_js_1 = __nccwpck_require__(4557); +const duplexify_1 = __importDefault(__nccwpck_require__(9112)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * A unique symbol for providing a `gccl-gcs-cmd` value + * for the `X-Goog-API-Client` header. + * + * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` + **/ +exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); +const requestDefaults = { + timeout: 60000, + gzip: true, + forever: true, + pool: { + maxSockets: Infinity, + }, +}; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + * @private + */ +const AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. 
+ * + * @const {number} + * @private + */ +const MAX_RETRY_DEFAULT = 3; +/** + * Custom error type for API errors. + * + * @param {object} errorBody - Error object. + */ +class ApiError extends Error { + constructor(errorBodyOrMessage) { + super(); + if (typeof errorBodyOrMessage !== 'object') { + this.message = errorBodyOrMessage || ''; + return; + } + const errorBody = errorBodyOrMessage; + this.code = errorBody.code; + this.errors = errorBody.errors; + this.response = errorBody.response; + try { + this.errors = JSON.parse(this.response.body).error.errors; + } + catch (e) { + this.errors = errorBody.errors; + } + this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); + Error.captureStackTrace(this); + } + /** + * Pieces together an error message by combining all unique error messages + * returned from a single GoogleError + * + * @private + * + * @param {GoogleErrorBody} err The original error. + * @param {GoogleInnerError[]} [errors] Inner errors, if any. + * @returns {string} + */ + static createMultiErrorMessage(err, errors) { + const messages = new Set(); + if (err.message) { + messages.add(err.message); + } + if (errors && errors.length) { + errors.forEach(({ message }) => messages.add(message)); + } + else if (err.response && err.response.body) { + messages.add(htmlEntities.decode(err.response.body.toString())); + } + else if (!err.message) { + messages.add('A failure occurred during this request.'); + } + let messageArr = Array.from(messages); + if (messageArr.length > 1) { + messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); + messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); + messageArr.push('\n'); + } + return messageArr.join('\n'); + } +} +exports.ApiError = ApiError; +/** + * Custom error type for partial errors returned from the API. + * + * @param {object} b - Error object. + */ +class PartialFailureError extends Error { + constructor(b) { + super(); + const errorObject = b; + this.errors = errorObject.errors; + this.name = 'PartialFailureError'; + this.response = errorObject.response; + this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); + } +} +exports.PartialFailureError = PartialFailureError; +class Util { + constructor() { + this.ApiError = ApiError; + this.PartialFailureError = PartialFailureError; + } + /** + * No op. + * + * @example + * function doSomething(callback) { + * callback = callback || noop; + * } + */ + noop() { } + /** + * Uniformly process an API response. + * + * @param {*} err - Error value. + * @param {*} resp - Response value. + * @param {*} body - Body value. + * @param {function} callback - The callback function. + */ + handleResp(err, resp, body, callback) { + callback = callback || util.noop; + const parsedResp = { + err: err || null, + ...(resp && util.parseHttpRespMessage(resp)), + ...(body && util.parseHttpRespBody(body)), + }; + // Assign the parsed body to resp.body, even if { json: false } was passed + // as a request option. + // We assume that nobody uses the previously unparsed value of resp.body. + if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { + parsedResp.resp.body = parsedResp.body; + } + if (parsedResp.err && resp) { + parsedResp.err.response = resp; + } + callback(parsedResp.err, parsedResp.body, parsedResp.resp); + } + /** + * Sniff an incoming HTTP response message for errors. + * + * @param {object} httpRespMessage - An incoming HTTP response message from `request`. 
+ * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.resp - The original response object. + */ + parseHttpRespMessage(httpRespMessage) { + const parsedHttpRespMessage = { + resp: httpRespMessage, + }; + if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { + // Unknown error. Format according to ApiError standard. + parsedHttpRespMessage.err = new ApiError({ + errors: new Array(), + code: httpRespMessage.statusCode, + message: httpRespMessage.statusMessage, + response: httpRespMessage, + }); + } + return parsedHttpRespMessage; + } + /** + * Parse the response body from an HTTP request. + * + * @param {object} body - The response body. + * @return {object} parsedHttpRespMessage - The parsed response. + * @param {?error} parsedHttpRespMessage.err - An error detected. + * @param {object} parsedHttpRespMessage.body - The original body value provided + * will try to be JSON.parse'd. If it's successful, the parsed value will + * be returned here, otherwise the original value and an error will be returned. + */ + parseHttpRespBody(body) { + const parsedHttpRespBody = { + body, + }; + if (typeof body === 'string') { + try { + parsedHttpRespBody.body = JSON.parse(body); + } + catch (err) { + parsedHttpRespBody.body = body; + } + } + if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { + // Error from JSON API. + parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); + } + return parsedHttpRespBody; + } + /** + * Take a Duplexify stream, fetch an authenticated connection header, and + * create an outgoing writable stream. + * + * @param {Duplexify} dup - Duplexify stream. + * @param {object} options - Configuration object. + * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. + * @param {object} options.metadata - Metadata to send at the head of the request. + * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. + * @param {string=} options.request.method - Default: "POST". + * @param {string=} options.request.qs.uploadType - Default: "multipart". + * @param {string=} options.streamContentType - Default: "application/octet-stream". + * @param {function} onComplete - Callback, executed after the writable Request stream has completed. + */ + makeWritableStream(dup, options, onComplete) { + var _a; + onComplete = onComplete || util.noop; + const writeStream = new ProgressStream(); + writeStream.on('progress', evt => dup.emit('progress', evt)); + dup.setWritable(writeStream); + const defaultReqOpts = { + method: 'POST', + qs: { + uploadType: 'multipart', + }, + timeout: 0, + maxRetries: 0, + }; + const metadata = options.metadata || {}; + const reqOpts = { + ...defaultReqOpts, + ...options.request, + qs: { + ...defaultReqOpts.qs, + ...(_a = options.request) === null || _a === void 0 ? 
void 0 : _a.qs, + }, + multipart: [ + { + 'Content-Type': 'application/json', + body: JSON.stringify(metadata), + }, + { + 'Content-Type': metadata.contentType || 'application/octet-stream', + body: writeStream, + }, + ], + }; + options.makeAuthenticatedRequest(reqOpts, { + onAuthenticated(err, authenticatedReqOpts) { + if (err) { + dup.destroy(err); + return; + } + requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const request = teeny_request_1.teenyRequest.defaults(requestDefaults); + request(authenticatedReqOpts, (err, resp, body) => { + util.handleResp(err, resp, body, (err, data) => { + if (err) { + dup.destroy(err); + return; + } + dup.emit('response', resp); + onComplete(data); + }); + }); + }, + }); + } + /** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. This is used for rate limit + * related errors as well as intermittent server errors. + * + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ + shouldRetryRequest(err) { + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (err.errors) { + for (const e of err.errors) { + const reason = e.reason; + if (reason === 'rateLimitExceeded') { + return true; + } + if (reason === 'userRateLimitExceeded') { + return true; + } + if (reason && reason.includes('EAI_AGAIN')) { + return true; + } + } + } + } + return false; + } + /** + * Get a function for making authenticated requests. + * + * @param {object} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {object=} config.credentials - Credentials object. + * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. + * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. + * @param {string=} config.email - Account email address, required for PEM/P12 usage. + * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) + * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. + * @param {array} config.scopes - Array of scopes required for the API. 
+ */ + makeAuthenticatedRequestFactory(config) { + const googleAutoAuthConfig = { ...config }; + if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + delete googleAutoAuthConfig.projectId; + } + let authClient; + if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { + // Use an existing `GoogleAuth` + authClient = googleAutoAuthConfig.authClient; + } + else { + // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available + authClient = new google_auth_library_1.GoogleAuth({ + ...googleAutoAuthConfig, + authClient: googleAutoAuthConfig.authClient, + clientOptions: googleAutoAuthConfig.clientOptions, + }); + } + function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { + let stream; + let projectId; + const reqConfig = { ...config }; + let activeRequest_; + if (!optionsOrCallback) { + stream = (0, duplexify_1.default)(); + reqConfig.stream = stream; + } + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; + async function setProjectId() { + projectId = await authClient.getProjectId(); + } + const onAuthenticated = async (err, authenticatedReqOpts) => { + const authLibraryError = err; + const autoAuthFailed = err && + typeof err.message === 'string' && + err.message.indexOf('Could not load the default credentials') > -1; + if (autoAuthFailed) { + // Even though authentication failed, the API might not actually + // care. + authenticatedReqOpts = reqOpts; + } + if (!err || autoAuthFailed) { + try { + // Try with existing `projectId` value + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + if (e instanceof projectify_1.MissingProjectIdError) { + // A `projectId` was required, but we don't have one. + try { + // Attempt to get the `projectId` + await setProjectId(); + authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); + err = null; + } + catch (e) { + // Re-use the "Could not load the default credentials error" if + // auto auth failed. + err = err || e; + } + } + else { + // Some other error unrelated to missing `projectId` + err = err || e; + } + } + } + if (err) { + if (stream) { + stream.destroy(err); + } + else { + const fn = options && options.onAuthenticated + ? options.onAuthenticated + : callback; + fn(err); + } + return; + } + if (options && options.onAuthenticated) { + options.onAuthenticated(null, authenticatedReqOpts); + } + else { + activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { + if (apiResponseError && + apiResponseError.code === 401 && + authLibraryError) { + // Re-use the "Could not load the default credentials error" if + // the API request failed due to missing credentials. + apiResponseError = authLibraryError; + } + callback(apiResponseError, ...params); + }); + } + }; + const prepareRequest = async () => { + try { + const getProjectId = async () => { + if (config.projectId && + config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { + // The user provided a project ID. We don't need to check with the + // auth client, it could be incorrect. + return config.projectId; + } + if (config.projectIdRequired === false) { + // A projectId is not required. Return the default. 
+ return service_js_1.DEFAULT_PROJECT_ID_TOKEN; + } + return setProjectId(); + }; + const authorizeRequest = async () => { + if (reqConfig.customEndpoint && + !reqConfig.useAuthWithCustomEndpoint) { + // Using a custom API override. Do not use `google-auth-library` for + // authentication. (ex: connecting to a local Datastore server) + return reqOpts; + } + else { + return authClient.authorizeRequest(reqOpts); + } + }; + const [_projectId, authorizedReqOpts] = await Promise.all([ + getProjectId(), + authorizeRequest(), + ]); + if (_projectId) { + projectId = _projectId; + } + return onAuthenticated(null, authorizedReqOpts); + } + catch (e) { + return onAuthenticated(e); + } + }; + prepareRequest(); + if (stream) { + return stream; + } + return { + abort() { + setImmediate(() => { + if (activeRequest_) { + activeRequest_.abort(); + activeRequest_ = null; + } + }); + }, + }; + } + const mar = makeAuthenticatedRequest; + mar.getCredentials = authClient.getCredentials.bind(authClient); + mar.authClient = authClient; + return mar; + } + /** + * Make a request through the `retryRequest` module with built-in error + * handling and exponential back off. + * + * @param {object} reqOpts - Request options in the format `request` expects. + * @param {object=} config - Configuration object. + * @param {boolean=} config.autoRetry - Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * (default: true) + * @param {number=} config.maxRetries - Maximum number of automatic retries + * attempted before returning the error. (default: 3) + * @param {object=} config.request - HTTP module for request calls. + * @param {function} callback - The callback function. + */ + makeRequest(reqOpts, config, callback) { + var _a, _b, _c, _d, _e; + let autoRetryValue = AUTO_RETRY_DEFAULT; + if (config.autoRetry !== undefined) { + autoRetryValue = config.autoRetry; + } + else if (((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined) { + autoRetryValue = config.retryOptions.autoRetry; + } + let maxRetryValue = MAX_RETRY_DEFAULT; + if (config.maxRetries !== undefined) { + maxRetryValue = config.maxRetries; + } + else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { + maxRetryValue = config.retryOptions.maxRetries; + } + requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); + const options = { + request: teeny_request_1.teenyRequest.defaults(requestDefaults), + retries: autoRetryValue !== false ? maxRetryValue : 0, + noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, + shouldRetryFn(httpRespMessage) { + var _a, _b; + const err = util.parseHttpRespMessage(httpRespMessage).err; + if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { + return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); + } + return err && util.shouldRetryRequest(err); + }, + maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, + retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, + totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? 
void 0 : _e.totalTimeout, + }; + if (typeof reqOpts.maxRetries === 'number') { + options.retries = reqOpts.maxRetries; + options.noResponseRetries = reqOpts.maxRetries; + } + if (!config.stream) { + return (0, retry_request_1.default)(reqOpts, options, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err, response, body) => { + util.handleResp(err, response, body, callback); + }); + } + const dup = config.stream; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let requestStream; + const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; + if (isGetRequest) { + requestStream = (0, retry_request_1.default)(reqOpts, options); + dup.setReadable(requestStream); + } + else { + // Streaming writable HTTP requests cannot be retried. + requestStream = options.request(reqOpts); + dup.setWritable(requestStream); + } + // Replay the Request events back to the stream. + requestStream + .on('error', dup.destroy.bind(dup)) + .on('response', dup.emit.bind(dup, 'response')) + .on('complete', dup.emit.bind(dup, 'complete')); + dup.abort = requestStream.abort; + return dup; + } + /** + * Decorate the options about to be made in a request. + * + * @param {object} reqOpts - The options to be passed to `request`. + * @param {string} projectId - The project ID. + * @return {object} reqOpts - The decorated reqOpts. + */ + decorateRequest(reqOpts, projectId) { + delete reqOpts.autoPaginate; + delete reqOpts.autoPaginateVal; + delete reqOpts.objectMode; + if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { + delete reqOpts.qs.autoPaginate; + delete reqOpts.qs.autoPaginateVal; + reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); + } + if (Array.isArray(reqOpts.multipart)) { + reqOpts.multipart = reqOpts.multipart.map(part => { + return (0, projectify_1.replaceProjectIdToken)(part, projectId); + }); + } + if (reqOpts.json !== null && typeof reqOpts.json === 'object') { + delete reqOpts.json.autoPaginate; + delete reqOpts.json.autoPaginateVal; + reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); + } + reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); + return reqOpts; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + isCustomType(unknown, module) { + function getConstructorName(obj) { + return obj.constructor && obj.constructor.name.toLowerCase(); + } + const moduleNameParts = module.split('/'); + const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); + const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); + if (subModuleName && getConstructorName(unknown) !== subModuleName) { + return false; + } + let walkingModule = unknown; + // eslint-disable-next-line no-constant-condition + while (true) { + if (getConstructorName(walkingModule) === parentModuleName) { + return true; + } + walkingModule = walkingModule.parent; + if (!walkingModule) { + return false; + } + } + } + /** + * Given two parameters, figure out if this is either: + * - Just a callback function + * - An options object, and then a callback function + * @param optionsOrCallback An options object or callback. + * @param cb A potentially undefined callback. + */ + maybeOptionsOrCallback(optionsOrCallback, cb) { + return typeof optionsOrCallback === 'function' + ? 
[{}, optionsOrCallback] + : [optionsOrCallback, cb]; + } + _getDefaultHeaders(gcclGcsCmd) { + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, + }; + if (gcclGcsCmd) { + headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; + } + return headers; + } +} +exports.Util = Util; +/** + * Basic Passthrough Stream that records the number of bytes read + * every time the cursor is moved. + */ +class ProgressStream extends stream_1.Transform { + constructor() { + super(...arguments); + this.bytesRead = 0; + } + // eslint-disable-next-line @typescript-eslint/no-explicit-any + _transform(chunk, encoding, callback) { + this.bytesRead += chunk.length; + this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); + this.push(chunk); + callback(); + } +} +const util = new Util(); +exports.util = util; + + +/***/ }), + +/***/ 8598: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Notification = void 0; +const index_js_1 = __nccwpck_require__(1325); +const promisify_1 = __nccwpck_require__(206); +/** + * The API-formatted resource description of the notification. + * + * Note: This is not guaranteed to be up-to-date when accessed. To get the + * latest record, call the `getMetadata()` method. + * + * @name Notification#metadata + * @type {object} + */ +/** + * A Notification object is created from your {@link Bucket} object using + * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub + * notifications. + * + * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket The bucket instance this notification is attached to. + * @param {string} id The ID of the notification. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * + * const notification = myBucket.notification('1'); + * ``` + */ +class Notification extends index_js_1.ServiceObject { + constructor(bucket, id) { + const requestQueryObject = {}; + const methods = { + /** + * Creates a notification subscription for the bucket. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} + * @method Notification#create + * + * @param {Topic|string} topic The Cloud PubSub topic to which this + * subscription publishes. If the project ID is omitted, the current + * project ID will be used. 
+ * + * Acceptable formats are: + * - `projects/grape-spaceship-123/topics/my-topic` + * + * - `my-topic` + * @param {CreateNotificationRequest} [options] Metadata to set for + * the notification. + * @param {CreateNotificationCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a valid topic is not provided. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.create(function(err, notification, apiResponse) { + * if (!err) { + * // The notification was created successfully. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.create().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + create: true, + /** + * @typedef {array} DeleteNotificationResponse + * @property {object} 0 The full API response. + */ + /** + * Permanently deletes a notification subscription. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {DeleteNotificationCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.delete(function(err, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.delete().then(function(data) { + * const apiResponse = data[0]; + * }); + * + * ``` + * @example include:samples/deleteNotification.js + * region_tag:storage_delete_bucket_notification + * Another example: + */ + delete: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get a notification and its metadata if it exists. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * See {@link Bucket#createNotification} for create options. + * @param {boolean} [options.autoCreate] Automatically create the object if + * it does not exist. Default: `false`. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.get(function(err, notification, apiResponse) { + * // `notification.metadata` has been populated. + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.get().then(function(data) { + * const notification = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ + get: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * Get the notification's metadata. 
+ * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} + * + * @param {object} [options] Configuration options. + * @param {string} [options.userProject] The ID of the project which will be + * billed for the request. + * @param {GetNotificationMetadataCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.getMetadata(function(err, metadata, apiResponse) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.getMetadata().then(function(data) { + * const metadata = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/getMetadataNotifications.js + * region_tag:storage_print_pubsub_bucket_notification + * Another example: + */ + getMetadata: { + reqOpts: { + qs: requestQueryObject, + }, + }, + /** + * @typedef {array} NotificationExistsResponse + * @property {boolean} 0 Whether the notification exists or not. + */ + /** + * @callback NotificationExistsCallback + * @param {?Error} err Request error, if any. + * @param {boolean} exists Whether the notification exists or not. + */ + /** + * Check if the notification exists. + * + * @method Notification#exists + * @param {NotificationExistsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const myBucket = storage.bucket('my-bucket'); + * const notification = myBucket.notification('1'); + * + * notification.exists(function(err, exists) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * notification.exists().then(function(data) { + * const exists = data[0]; + * }); + * ``` + */ + exists: true, + }; + super({ + parent: bucket, + baseUrl: '/notificationConfigs', + id: id.toString(), + createMethod: bucket.createNotification.bind(bucket), + methods, + }); + } +} +exports.Notification = Notification; +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Notification); + + +/***/ }), + +/***/ 8043: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Upload = exports.PROTOCOL_REGEX = void 0; +exports.upload = upload; +exports.createURI = createURI; +exports.checkUploadStatus = checkUploadStatus; +const abort_controller_1 = __importDefault(__nccwpck_require__(7413)); +const crypto_1 = __nccwpck_require__(6982); +const gaxios = __importStar(__nccwpck_require__(7003)); +const google_auth_library_1 = __nccwpck_require__(492); +const stream_1 = __nccwpck_require__(2203); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const uuid = __importStar(__nccwpck_require__(2048)); +const util_js_1 = __nccwpck_require__(4557); +const util_js_2 = __nccwpck_require__(7325); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const NOT_FOUND_STATUS_CODE = 404; +const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +class Upload extends stream_1.Writable { + constructor(cfg) { + var _a; + super(cfg); + _Upload_instances.add(this); + this.numBytesWritten = 0; + this.numRetries = 0; + this.currentInvocationId = { + checkUploadStatus: uuid.v4(), + chunk: uuid.v4(), + uri: uuid.v4(), + }; + /** + * A cache of buffers written to this instance, ready for consuming + */ + this.writeBuffers = []; + this.numChunksReadInRequest = 0; + /** + * An array of buffers used for caching the most recent upload chunk. + * We should not assume that the server received all bytes sent in the request. + * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + */ + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; + this.upstreamEnded = false; + _Upload_gcclGcsCmd.set(this, void 0); + cfg = cfg || {}; + if (!cfg.bucket || !cfg.file) { + throw new Error('A bucket and file name are required'); + } + if (cfg.offset && !cfg.uri) { + throw new RangeError('Cannot provide an `offset` without providing a `uri`'); + } + if (cfg.isPartialUpload && !cfg.chunkSize) { + throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); + } + cfg.authConfig = cfg.authConfig || {}; + cfg.authConfig.scopes = [ + 'https://www.googleapis.com/auth/devstorage.full_control', + ]; + this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); + const universe = cfg.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + this.apiEndpoint = `https://storage.${universe}`; + if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { + this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); + const hostname = new URL(this.apiEndpoint).hostname; + // check if it is a domain of a known universe + const isDomain = hostname === universe; + const isDefaultUniverseDomain = hostname === google_auth_library_1.DEFAULT_UNIVERSE; + // check if it is a subdomain of a known universe + // by checking a last (universe's length + 1) of a hostname + const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; + const isSubDomainOfDefaultUniverse = hostname.slice(-(google_auth_library_1.DEFAULT_UNIVERSE.length + 1)) === + `.${google_auth_library_1.DEFAULT_UNIVERSE}`; + if (!isDomain && + !isDefaultUniverseDomain && + !isSubDomainOfUniverse && + !isSubDomainOfDefaultUniverse) { + 
// a custom, non-universe domain, + // use gaxios + this.authClient = gaxios; + } + } + this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; + this.bucket = cfg.bucket; + const cacheKeyElements = [cfg.bucket, cfg.file]; + if (typeof cfg.generation === 'number') { + cacheKeyElements.push(`${cfg.generation}`); + } + this.cacheKey = cacheKeyElements.join('/'); + this.customRequestOptions = cfg.customRequestOptions || {}; + this.file = cfg.file; + this.generation = cfg.generation; + this.kmsKeyName = cfg.kmsKeyName; + this.metadata = cfg.metadata || {}; + this.offset = cfg.offset; + this.origin = cfg.origin; + this.params = cfg.params || {}; + this.userProject = cfg.userProject; + this.chunkSize = cfg.chunkSize; + this.retryOptions = cfg.retryOptions; + this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; + if (cfg.key) { + if (typeof cfg.key === 'string') { + const base64Key = Buffer.from(cfg.key).toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + else { + const base64Key = cfg.key.toString('base64'); + this.encryption = { + key: base64Key, + hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), + }; + } + } + this.predefinedAcl = cfg.predefinedAcl; + if (cfg.private) + this.predefinedAcl = 'private'; + if (cfg.public) + this.predefinedAcl = 'publicRead'; + const autoRetry = cfg.retryOptions.autoRetry; + this.uriProvidedManually = !!cfg.uri; + this.uri = cfg.uri; + if (this.offset) { + // we're resuming an incomplete upload + this.numBytesWritten = this.offset; + } + this.numRetries = 0; // counter for number of retries currently executed + if (!autoRetry) { + cfg.retryOptions.maxRetries = 0; + } + this.timeOfFirstRequest = Date.now(); + const contentLength = cfg.metadata + ? Number(cfg.metadata.contentLength) + : NaN; + this.contentLength = isNaN(contentLength) ? '*' : contentLength; + __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); + this.once('writing', () => { + if (this.uri) { + this.continueUploading(); + } + else { + this.createURI(err => { + if (err) { + return this.destroy(err); + } + this.startUploading(); + return; + }); + } + }); + } + /** + * Prevent 'finish' event until the upload has succeeded. + * + * @param fireFinishEvent The finish callback + */ + _final(fireFinishEvent = () => { }) { + this.upstreamEnded = true; + this.once('uploadFinished', fireFinishEvent); + process.nextTick(() => { + this.emit('upstreamFinished'); + // it's possible `_write` may not be called - namely for empty object uploads + this.emit('writing'); + }); + } + /** + * Handles incoming data from upstream + * + * @param chunk The chunk to append to the buffer + * @param encoding The encoding of the chunk + * @param readCallback A callback for when the buffer has been read downstream + */ + _write(chunk, encoding, readCallback = () => { }) { + // Backwards-compatible event + this.emit('writing'); + this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); + this.once('readFromChunkBuffer', readCallback); + process.nextTick(() => this.emit('wroteToChunkBuffer')); + } + /** + * Prepends the local buffer to write buffer and resets it. + * + * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
+ */ + prependLocalBufferToUpstream(keepLastBytes) { + // Typically, the upstream write buffers should be smaller than the local + // cache, so we can save time by setting the local cache as the new + // upstream write buffer array and appending the old array to it + let initialBuffers = []; + if (keepLastBytes) { + // we only want the last X bytes + let bytesKept = 0; + while (keepLastBytes > bytesKept) { + // load backwards because we want the last X bytes + // note: `localWriteCacheByteLength` is reset below + let buf = this.localWriteCache.pop(); + if (!buf) + break; + bytesKept += buf.byteLength; + if (bytesKept > keepLastBytes) { + // we have gone over the amount desired, let's keep the last X bytes + // of this buffer + const diff = bytesKept - keepLastBytes; + buf = buf.subarray(diff); + bytesKept -= diff; + } + initialBuffers.unshift(buf); + } + } + else { + // we're keeping all of the local cache, simply use it as the initial buffer + initialBuffers = this.localWriteCache; + } + // Append the old upstream to the new + const append = this.writeBuffers; + this.writeBuffers = initialBuffers; + for (const buf of append) { + this.writeBuffers.push(buf); + } + // reset last buffers sent + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + /** + * Retrieves data from upstream's buffer. + * + * @param limit The maximum amount to return from the buffer. + */ + *pullFromChunkBuffer(limit) { + while (limit) { + const buf = this.writeBuffers.shift(); + if (!buf) + break; + let bufToYield = buf; + if (buf.byteLength > limit) { + bufToYield = buf.subarray(0, limit); + this.writeBuffers.unshift(buf.subarray(limit)); + limit = 0; + } + else { + limit -= buf.byteLength; + } + yield bufToYield; + // Notify upstream we've read from the buffer and we're able to consume + // more. It can also potentially send more data down as we're currently + // iterating. + this.emit('readFromChunkBuffer'); + } + } + /** + * A handler for determining if data is ready to be read from upstream. + * + * @returns If there will be more chunks to read in the future + */ + async waitForNextChunk() { + const willBeMoreChunks = await new Promise(resolve => { + // There's data available - it should be digested + if (this.writeBuffers.length) { + return resolve(true); + } + // The upstream writable ended, we shouldn't expect any more data. + if (this.upstreamEnded) { + return resolve(false); + } + // Nothing immediate seems to be determined. We need to prepare some + // listeners to determine next steps... + const wroteToChunkBufferCallback = () => { + removeListeners(); + return resolve(true); + }; + const upstreamFinishedCallback = () => { + removeListeners(); + // this should be the last chunk, if there's anything there + if (this.writeBuffers.length) + return resolve(true); + return resolve(false); + }; + // Remove listeners when we're ready to callback. + const removeListeners = () => { + this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); + this.removeListener('upstreamFinished', upstreamFinishedCallback); + }; + // If there's data recently written it should be digested + this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); + // If the upstream finishes let's see if there's anything to grab + this.once('upstreamFinished', upstreamFinishedCallback); + }); + return willBeMoreChunks; + } + /** + * Reads data from upstream up to the provided `limit`. + * Ends when the limit has reached or no data is expected to be pushed from upstream. 
+ * + * @param limit The most amount of data this iterator should return. `Infinity` by default. + */ + async *upstreamIterator(limit = Infinity) { + // read from upstream chunk buffer + while (limit && (await this.waitForNextChunk())) { + // read until end or limit has been reached + for (const chunk of this.pullFromChunkBuffer(limit)) { + limit -= chunk.byteLength; + yield chunk; + } + } + } + createURI(callback) { + if (!callback) { + return this.createURIAsync(); + } + this.createURIAsync().then(r => callback(null, r), callback); + } + async createURIAsync() { + const metadata = { ...this.metadata }; + const headers = {}; + // Delete content length and content type from metadata if they exist. + // These are headers and should not be sent as part of the metadata. + if (metadata.contentLength) { + headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); + delete metadata.contentLength; + } + if (metadata.contentType) { + headers['X-Upload-Content-Type'] = metadata.contentType; + delete metadata.contentType; + } + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + // Check if headers already exist before creating new ones + const reqOpts = { + method: 'POST', + url: [this.baseURI, this.bucket, 'o'].join('/'), + params: Object.assign({ + name: this.file, + uploadType: 'resumable', + }, this.params), + data: metadata, + headers: { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + ...headers, + }, + }; + if (metadata.contentLength) { + reqOpts.headers['X-Upload-Content-Length'] = + metadata.contentLength.toString(); + } + if (metadata.contentType) { + reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; + } + if (typeof this.generation !== 'undefined') { + reqOpts.params.ifGenerationMatch = this.generation; + } + if (this.kmsKeyName) { + reqOpts.params.kmsKeyName = this.kmsKeyName; + } + if (this.predefinedAcl) { + reqOpts.params.predefinedAcl = this.predefinedAcl; + } + if (this.origin) { + reqOpts.headers.Origin = this.origin; + } + const uri = await (0, async_retry_1.default)(async (bail) => { + var _a, _b, _c; + try { + const res = await this.makeRequest(reqOpts); + // We have successfully got a URI we can now create a new invocation id + this.currentInvocationId.uri = uuid.v4(); + return res.headers.location; + } + catch (err) { + const e = err; + const apiError = { + code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, + name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, + message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, + errors: [ + { + reason: e.code, + }, + ], + }; + if (this.retryOptions.maxRetries > 0 && + this.retryOptions.retryableErrorFn(apiError)) { + throw e; + } + else { + return bail(e); + } + } + }, { + retries: this.retryOptions.maxRetries, + factor: this.retryOptions.retryDelayMultiplier, + maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds + maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds + }); + this.uri = uri; + this.offset = 0; + // emit the newly generated URI for future reuse, if necessary. 
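At the HTTP level, createURIAsync above amounts to a single POST with uploadType=resumable that returns the session URI in the Location response header. A rough sketch using Node 18+ global fetch, with my-bucket, file.txt and accessToken as placeholders; the real client goes through the authorized transport and the retry wrapper shown above:

async function startResumableSession(bucket, objectName, accessToken) {
  // POST .../upload/storage/v1/b/<bucket>/o?uploadType=resumable&name=<object>,
  // mirroring the reqOpts built in createURIAsync. Content length/type travel
  // as X-Upload-* headers rather than in the metadata body.
  const url =
    `https://storage.googleapis.com/upload/storage/v1/b/${bucket}/o` +
    `?uploadType=resumable&name=${encodeURIComponent(objectName)}`;
  const res = await fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Content-Type': 'application/json; charset=UTF-8',
      'X-Upload-Content-Type': 'text/plain',
    },
    body: JSON.stringify({}), // object metadata, if any
  });
  if (!res.ok) throw new Error(`createURI failed: HTTP ${res.status}`);
  return res.headers.get('location'); // the resumable session URI
}

// startResumableSession('my-bucket', 'file.txt', accessToken).then(console.log);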
+ this.emit('uri', uri); + return uri; + } + async continueUploading() { + var _a; + (_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset()); + return this.startUploading(); + } + async startUploading() { + const multiChunkMode = !!this.chunkSize; + let responseReceived = false; + this.numChunksReadInRequest = 0; + if (!this.offset) { + this.offset = 0; + } + // Check if the offset (server) is too far behind the current stream + if (this.offset < this.numBytesWritten) { + const delta = this.numBytesWritten - this.offset; + const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; + this.emit('error', new RangeError(message)); + return; + } + // Check if we should 'fast-forward' to the relevant data to upload + if (this.numBytesWritten < this.offset) { + // 'fast-forward' to the byte where we need to upload. + // only push data from the byte after the one we left off on + const fastForwardBytes = this.offset - this.numBytesWritten; + for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { + _chunk; // discard the data up until the point we want + } + this.numBytesWritten = this.offset; + } + let expectedUploadSize = undefined; + // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available + if (typeof this.contentLength === 'number') { + expectedUploadSize = this.contentLength - this.numBytesWritten; + } + // `expectedUploadSize` should be no more than the `chunkSize`. + // It's possible this is the last chunk request for a multiple + // chunk upload, thus smaller than the chunk size. + if (this.chunkSize) { + expectedUploadSize = expectedUploadSize + ? Math.min(this.chunkSize, expectedUploadSize) + : this.chunkSize; + } + // A queue for the upstream data + const upstreamQueue = this.upstreamIterator(expectedUploadSize); + // The primary read stream for this request. This stream retrieves no more + // than the exact requested amount from upstream. 
+ const requestStream = new stream_1.Readable({ + read: async () => { + // Don't attempt to retrieve data upstream if we already have a response + if (responseReceived) + requestStream.push(null); + const result = await upstreamQueue.next(); + if (result.value) { + this.numChunksReadInRequest++; + if (multiChunkMode) { + // save ever buffer used in the request in multi-chunk mode + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + else { + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); + } + this.numBytesWritten += result.value.byteLength; + this.emit('progress', { + bytesWritten: this.numBytesWritten, + contentLength: this.contentLength, + }); + requestStream.push(result.value); + } + if (result.done) { + requestStream.push(null); + } + }, + }); + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const headers = { + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }; + // If using multiple chunk upload, set appropriate header + if (multiChunkMode) { + // We need to know how much data is available upstream to set the `Content-Range` header. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + // This will conveniently track and keep the size of the buffers. + // We will reach either the expected upload size or the remainder of the stream. + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); + } + // This is the sum from the `#addLocalBufferCache` calls + const bytesToUpload = this.localWriteCacheByteLength; + // Important: we want to know if the upstream has ended and the queue is empty before + // unshifting data back into the queue. This way we will know if this is the last request or not. + const isLastChunkOfUpload = !(await this.waitForNextChunk()); + // Important: put the data back in the queue for the actual upload + this.prependLocalBufferToUpstream(); + let totalObjectSize = this.contentLength; + if (typeof this.contentLength !== 'number' && + isLastChunkOfUpload && + !this.isPartialUpload) { + // Let's let the server know this is the last chunk of the object since we didn't set it before. + totalObjectSize = bytesToUpload + this.numBytesWritten; + } + // `- 1` as the ending byte is inclusive in the request. 
+ const endingByte = bytesToUpload + this.numBytesWritten - 1; + // `Content-Length` for multiple chunk uploads is the size of the chunk, + // not the overall object + headers['Content-Length'] = bytesToUpload; + headers['Content-Range'] = + `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + } + else { + headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + } + const reqOpts = { + method: 'PUT', + url: this.uri, + headers, + body: requestStream, + }; + try { + const resp = await this.makeRequestStream(reqOpts); + if (resp) { + responseReceived = true; + await this.responseHandler(resp); + } + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + // Process the API response to look for errors that came in + // the response body. + async responseHandler(resp) { + if (resp.data.error) { + this.destroy(resp.data.error); + return; + } + // At this point we can safely create a new id for the chunk + this.currentInvocationId.chunk = uuid.v4(); + const moreDataToUpload = await this.waitForNextChunk(); + const shouldContinueWithNextMultiChunkRequest = this.chunkSize && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + resp.headers.range && + moreDataToUpload; + /** + * This is true when we're expecting to upload more data in a future request, + * yet the upstream for the upload session has been exhausted. + */ + const shouldContinueUploadInAnotherRequest = this.isPartialUpload && + resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && + !moreDataToUpload; + if (shouldContinueWithNextMultiChunkRequest) { + // Use the upper value in this header to determine where to start the next chunk. + // We should not assume that the server received all bytes sent in the request. + // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const range = resp.headers.range; + this.offset = Number(range.split('-')[1]) + 1; + // We should not assume that the server received all bytes sent in the request. + // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload + const missingBytes = this.numBytesWritten - this.offset; + if (missingBytes) { + // As multi-chunk uploads send one chunk per request and pulls one + // chunk into the pipeline, prepending the missing bytes back should + // be fine for the next request. + this.prependLocalBufferToUpstream(missingBytes); + this.numBytesWritten -= missingBytes; + } + else { + // No bytes missing - no need to keep the local cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + } + // continue uploading next chunk + this.continueUploading(); + } + else if (!this.isSuccessfulResponse(resp.status) && + !shouldContinueUploadInAnotherRequest) { + const err = new Error('Upload failed'); + err.code = resp.status; + err.name = 'Upload failed'; + if (resp === null || resp === void 0 ? void 0 : resp.data) { + err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; + } + this.destroy(err); + } + else { + // no need to keep the cache + __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); + if (resp && resp.data) { + resp.data.size = Number(resp.data.size); + } + this.emit('metadata', resp.data); + // Allow the object (Upload) to continue naturally so the user's + // "finish" event fires. 
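The chunked-upload arithmetic above (an inclusive Content-Range on the request, and the Range header echoed back on a 308 response deciding where the next chunk starts) can be checked in isolation; a minimal sketch with illustrative chunk sizes:

function contentRangeForChunk(offset, chunkBytes, totalSize = '*') {
  // The ending byte is inclusive, hence the `- 1`.
  const endingByte = offset + chunkBytes - 1;
  return `bytes ${offset}-${endingByte}/${totalSize}`;
}

function nextOffsetFromRangeHeader(rangeHeader) {
  // A 308 response reports what the server actually has, e.g. "bytes=0-262143";
  // the next chunk starts one byte after the reported upper bound.
  return Number(rangeHeader.split('-')[1]) + 1;
}

console.log(contentRangeForChunk(0, 262144));              // bytes 0-262143/*
console.log(contentRangeForChunk(262144, 1000, 263144));   // bytes 262144-263143/263144
console.log(nextOffsetFromRangeHeader('bytes=0-262143'));  // 262144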
+ this.emit('uploadFinished'); + } + } + /** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ + async checkUploadStatus(config = {}) { + let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; + if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { + googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; + } + const opts = { + method: 'PUT', + url: this.uri, + headers: { + 'Content-Length': 0, + 'Content-Range': 'bytes */*', + 'User-Agent': (0, util_js_1.getUserAgentString)(), + 'x-goog-api-client': googAPIClient, + }, + }; + try { + const resp = await this.makeRequest(opts); + // Successfully got the offset we can now create a new offset invocation id + this.currentInvocationId.checkUploadStatus = uuid.v4(); + return resp; + } + catch (e) { + if (config.retry === false || + !(e instanceof Error) || + !this.retryOptions.retryableErrorFn(e)) { + throw e; + } + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + throw e; + } + await new Promise(res => setTimeout(res, retryDelay)); + return this.checkUploadStatus(config); + } + } + async getAndSetOffset() { + try { + // we want to handle retries in this method. + const resp = await this.checkUploadStatus({ retry: false }); + if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { + if (typeof resp.headers.range === 'string') { + this.offset = Number(resp.headers.range.split('-')[1]) + 1; + return; + } + } + this.offset = 0; + } + catch (e) { + const err = e; + if (this.retryOptions.retryableErrorFn(err)) { + this.attemptDelayedRetry({ + status: NaN, + data: err, + }); + return; + } + this.destroy(err); + } + } + async makeRequest(reqOpts) { + if (this.encryption) { + reqOpts.headers = reqOpts.headers || {}; + reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; + reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); + reqOpts.headers['x-goog-encryption-key-sha256'] = + this.encryption.hash.toString(); + } + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + // Let gaxios know we will handle a 308 error code ourselves. 
+ reqOpts.validateStatus = (status) => { + return (this.isSuccessfulResponse(status) || + status === RESUMABLE_INCOMPLETE_STATUS_CODE); + }; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + async makeRequestStream(reqOpts) { + const controller = new abort_controller_1.default(); + const errorCallback = () => controller.abort(); + this.once('error', errorCallback); + if (this.userProject) { + reqOpts.params = reqOpts.params || {}; + reqOpts.params.userProject = this.userProject; + } + reqOpts.signal = controller.signal; + reqOpts.validateStatus = () => true; + const combinedReqOpts = { + ...this.customRequestOptions, + ...reqOpts, + headers: { + ...this.customRequestOptions.headers, + ...reqOpts.headers, + }, + }; + const res = await this.authClient.request(combinedReqOpts); + const successfulRequest = this.onResponse(res); + this.removeListener('error', errorCallback); + return successfulRequest ? res : null; + } + /** + * @return {bool} is the request good? + */ + onResponse(resp) { + if (resp.status !== 200 && + this.retryOptions.retryableErrorFn({ + code: resp.status, + message: resp.statusText, + name: resp.statusText, + })) { + this.attemptDelayedRetry(resp); + return false; + } + this.emit('response', resp); + return true; + } + /** + * @param resp GaxiosResponse object from previous attempt + */ + attemptDelayedRetry(resp) { + if (this.numRetries < this.retryOptions.maxRetries) { + if (resp.status === NOT_FOUND_STATUS_CODE && + this.numChunksReadInRequest === 0) { + this.startUploading(); + } + else { + const retryDelay = this.getRetryDelay(); + if (retryDelay <= 0) { + this.destroy(new Error(`Retry total time limit exceeded - ${JSON.stringify(resp.data)}`)); + return; + } + // Unshift the local cache back in case it's needed for the next request. + this.numBytesWritten -= this.localWriteCacheByteLength; + this.prependLocalBufferToUpstream(); + // We don't know how much data has been received by the server. + // `continueUploading` will recheck the offset via `getAndSetOffset`. + // If `offset` < `numberBytesReceived` then we will raise a RangeError + // as we've streamed too much data that has been missed - this should + // not be the case for multi-chunk uploads as `lastChunkSent` is the + // body of the entire request. + this.offset = undefined; + setTimeout(this.continueUploading.bind(this), retryDelay); + } + this.numRetries++; + } + else { + this.destroy(new Error(`Retry limit exceeded - ${JSON.stringify(resp.data)}`)); + } + } + /** + * The amount of time to wait before retrying the request, in milliseconds. + * If negative, do not retry. + * + * @returns the amount of time to wait, in milliseconds. + */ + getRetryDelay() { + const randomMs = Math.round(Math.random() * 1000); + const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * + 1000 + + randomMs; + const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - + (Date.now() - this.timeOfFirstRequest); + const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; + return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); + } + /* + * Prepare user-defined API endpoint for compatibility with our API. 
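getRetryDelay above combines exponential growth, random jitter, a per-retry cap and the remaining total timeout; a minimal sketch with the jitter and elapsed time passed in explicitly so the numbers are reproducible (default-like option values assumed):

function retryDelayMs({ retryDelayMultiplier, maxRetryDelay, totalTimeout },
                      numRetries, elapsedMs, jitterMs) {
  const waitTime = Math.pow(retryDelayMultiplier, numRetries) * 1000 + jitterMs;
  const maxAllowableDelayMs = totalTimeout * 1000 - elapsedMs;
  const maxRetryDelayMs = maxRetryDelay * 1000;
  // A non-positive result means "do not retry" (total timeout exhausted).
  return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs);
}

const opts = { retryDelayMultiplier: 2, maxRetryDelay: 64, totalTimeout: 600 };
console.log(retryDelayMs(opts, 0, 0, 500));    // 1500  (2^0 * 1000 + jitter)
console.log(retryDelayMs(opts, 3, 0, 500));    // 8500  (2^3 * 1000 + jitter)
console.log(retryDelayMs(opts, 10, 0, 0));     // 64000 (capped by maxRetryDelay)
console.log(retryDelayMs(opts, 1, 599500, 0)); // 500   (total timeout nearly used up)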
+ */ + sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Check if a given status code is 2xx + * + * @param status The status code to check + * @returns if the status is 2xx + */ + isSuccessfulResponse(status) { + return status >= 200 && status < 300; + } +} +exports.Upload = Upload; +_Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { + this.localWriteCache = []; + this.localWriteCacheByteLength = 0; +}, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { + this.localWriteCache.push(buf); + this.localWriteCacheByteLength += buf.byteLength; +}; +function upload(cfg) { + return new Upload(cfg); +} +function createURI(cfg, callback) { + const up = new Upload(cfg); + if (!callback) { + return up.createURI(); + } + up.createURI().then(r => callback(null, r), callback); +} +/** + * Check the status of an existing resumable upload. + * + * @param cfg A configuration to use. `uri` is required. + * @returns the current upload status + */ +function checkUploadStatus(cfg) { + const up = new Upload(cfg); + return up.checkUploadStatus(); +} + + +/***/ }), + +/***/ 7319: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; +const crypto = __importStar(__nccwpck_require__(6982)); +const url = __importStar(__nccwpck_require__(7016)); +const storage_js_1 = __nccwpck_require__(2848); +const util_js_1 = __nccwpck_require__(4557); +var SignerExceptionMessages; +(function (SignerExceptionMessages) { + SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; + SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; + SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; +})(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); +/* + * Default signing version for getSignedUrl is 'v2'. + */ +const DEFAULT_SIGNING_VERSION = 'v2'; +const SEVEN_DAYS = 7 * 24 * 60 * 60; +/** + * @const {string} + * @deprecated - unused + */ +exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; +class URLSigner { + constructor(auth, bucket, file, + /** + * A {@link Storage} object. + * + * @privateRemarks + * + * Technically this is a required field, however it would be a breaking change to + * move it before optional properties. In the next major we should refactor the + * constructor of this class to only accept a config object. + */ + storage = new storage_js_1.Storage()) { + this.auth = auth; + this.bucket = bucket; + this.file = file; + this.storage = storage; + } + getSignedUrl(cfg) { + const expiresInSeconds = this.parseExpires(cfg.expires); + const method = cfg.method; + const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); + if (expiresInSeconds < accessibleAtInSeconds) { + throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); + } + let customHost; + // Default style is `path`. + const isVirtualHostedStyle = cfg.virtualHostedStyle || false; + if (cfg.cname) { + customHost = cfg.cname; + } + else if (isVirtualHostedStyle) { + customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; + } + const secondsToMilliseconds = 1000; + const config = Object.assign({}, cfg, { + method, + expiration: expiresInSeconds, + accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), + bucket: this.bucket.name, + file: this.file ? 
(0, util_js_1.encodeURI)(this.file.name, false) : undefined, + }); + if (customHost) { + config.cname = customHost; + } + const version = cfg.version || DEFAULT_SIGNING_VERSION; + let promise; + if (version === 'v2') { + promise = this.getSignedUrlV2(config); + } + else if (version === 'v4') { + promise = this.getSignedUrlV4(config); + } + else { + throw new Error(`Invalid signed URL version: ${version}. Supported versions are 'v2' and 'v4'.`); + } + return promise.then(query => { + var _a; + query = Object.assign(query, cfg.queryParams); + const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); + // eslint-disable-next-line @typescript-eslint/no-explicit-any + signedUrl.search = (0, util_js_1.qsStringify)(query); + return signedUrl.href; + }); + } + getSignedUrlV2(config) { + const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); + const resourcePath = this.getResourcePath(false, config.bucket, config.file); + const blobToSign = [ + config.method, + config.contentMd5 || '', + config.contentType || '', + config.expiration, + canonicalHeadersString + resourcePath, + ].join('\n'); + const sign = async () => { + var _a; + const auth = this.auth; + try { + const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const credentials = await auth.getCredentials(); + return { + GoogleAccessId: credentials.client_email, + Expires: config.expiration, + Signature: signature, + }; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + getSignedUrlV4(config) { + var _a; + config.accessibleAt = config.accessibleAt + ? config.accessibleAt + : new Date(); + const millisecondsToSeconds = 1.0 / 1000.0; + const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; + // v4 limit expiration to be 7 days maximum + if (expiresPeriodInSeconds > SEVEN_DAYS) { + throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); + } + const extensionHeaders = Object.assign({}, config.extensionHeaders); + const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? 
void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); + extensionHeaders.host = fqdn.hostname; + if (config.contentMd5) { + extensionHeaders['content-md5'] = config.contentMd5; + } + if (config.contentType) { + extensionHeaders['content-type'] = config.contentType; + } + let contentSha256; + const sha256Header = extensionHeaders['x-goog-content-sha256']; + if (sha256Header) { + if (typeof sha256Header !== 'string' || + !/[A-Fa-f0-9]{40}/.test(sha256Header)) { + throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); + } + contentSha256 = sha256Header; + } + const signedHeaders = Object.keys(extensionHeaders) + .map(header => header.toLowerCase()) + .sort() + .join(';'); + const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); + const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); + const credentialScope = `${datestamp}/auto/storage/goog4_request`; + const sign = async () => { + var _a; + const credentials = await this.auth.getCredentials(); + const credential = `${credentials.client_email}/${credentialScope}`; + const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? config.accessibleAt : new Date(), true); + const queryParams = { + 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', + 'X-Goog-Credential': credential, + 'X-Goog-Date': dateISO, + 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), + 'X-Goog-SignedHeaders': signedHeaders, + ...(config.queryParams || {}), + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); + const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); + const hash = crypto + .createHash('sha256') + .update(canonicalRequest) + .digest('hex'); + const blobToSign = [ + 'GOOG4-RSA-SHA256', + dateISO, + credentialScope, + hash, + ].join('\n'); + try { + const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); + const signatureHex = Buffer.from(signature, 'base64').toString('hex'); + const signedQuery = Object.assign({}, queryParams, { + 'X-Goog-Signature': signatureHex, + }); + return signedQuery; + } + catch (err) { + const error = err; + const signingErr = new SigningError(error.message); + signingErr.stack = error.stack; + throw signingErr; + } + }; + return sign(); + } + /** + * Create canonical headers for signing v4 url. + * + * The canonical headers for v4-signing a request demands header names are + * first lowercased, followed by sorting the header names. + * Then, construct the canonical headers part of the request: + * + ":" + Trim() + "\n" + * .. + * + ":" + Trim() + "\n" + * + * @param headers + * @private + */ + getCanonicalHeaders(headers) { + // Sort headers by their lowercased names + const sortedHeaders = (0, util_js_1.objectEntries)(headers) + // Convert header names to lowercase + .map(([headerName, value]) => [ + headerName.toLowerCase(), + value, + ]) + .sort((a, b) => a[0].localeCompare(b[0])); + return sortedHeaders + .filter(([, value]) => value !== undefined) + .map(([headerName, value]) => { + // - Convert Array (multi-valued header) into string, delimited by + // ',' (no space). + // - Trim leading and trailing spaces. 
+ // - Convert sequential (2+) spaces into a single space + const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); + return `${headerName}:${canonicalValue}\n`; + }) + .join(''); + } + getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { + return [ + method, + path, + query, + headers, + signedHeaders, + contentSha256 || 'UNSIGNED-PAYLOAD', + ].join('\n'); + } + getCanonicalQueryParams(query) { + return (0, util_js_1.objectEntries)(query) + .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) + .sort((a, b) => (a[0] < b[0] ? -1 : 1)) + .map(([key, value]) => `${key}=${value}`) + .join('&'); + } + getResourcePath(cname, bucket, file) { + if (cname) { + return '/' + (file || ''); + } + else if (file) { + return `/${bucket}/${file}`; + } + else { + return `/${bucket}`; + } + } + parseExpires(expires, current = new Date()) { + const expiresInMSeconds = new Date(expires).valueOf(); + if (isNaN(expiresInMSeconds)) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); + } + if (expiresInMSeconds < current.valueOf()) { + throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); + } + return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. + } + parseAccessibleAt(accessibleAt) { + const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); + if (isNaN(accessibleAtInMSeconds)) { + throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); + } + return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. + } +} +exports.URLSigner = URLSigner; +/** + * Custom error type for errors related to getting signed errors and policies. + * + * @private + */ +class SigningError extends Error { + constructor() { + super(...arguments); + this.name = 'SigningError'; + } +} +exports.SigningError = SigningError; + + +/***/ }), + +/***/ 2848: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
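The V4 header canonicalization described above (lowercase the names, sort them, drop undefined values, trim and collapse runs of spaces, emit one name:value pair per line) is easy to exercise on its own; a minimal sketch with made-up header values:

function canonicalHeaders(headers) {
  return Object.entries(headers)
    .map(([name, value]) => [name.toLowerCase(), value])
    .sort((a, b) => a[0].localeCompare(b[0]))
    .filter(([, value]) => value !== undefined)
    .map(([name, value]) => `${name}:${String(value).trim().replace(/\s{2,}/g, ' ')}\n`)
    .join('');
}

console.log(canonicalHeaders({
  Host: 'storage.googleapis.com',
  'X-Goog-Meta-Foo': '  bar   baz ',
  'Content-Type': 'text/plain',
}));
// content-type:text/plain
// host:storage.googleapis.com
// x-goog-meta-foo:bar baz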
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; +const index_js_1 = __nccwpck_require__(1325); +const paginator_1 = __nccwpck_require__(9469); +const promisify_1 = __nccwpck_require__(206); +const stream_1 = __nccwpck_require__(2203); +const bucket_js_1 = __nccwpck_require__(2887); +const channel_js_1 = __nccwpck_require__(2658); +const file_js_1 = __nccwpck_require__(9975); +const util_js_1 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const hmacKey_js_1 = __nccwpck_require__(5891); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +var IdempotencyStrategy; +(function (IdempotencyStrategy) { + IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; + IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; + IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; +})(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); +var ExceptionMessages; +(function (ExceptionMessages) { + ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; + ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; +})(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); +var StorageExceptionMessages; +(function (StorageExceptionMessages) { + StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; + StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; + StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; + StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; +})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); +exports.PROTOCOL_REGEX = /^(\w*):\/\//; +/** + * Default behavior: Automatically retry retriable server errors. + * + * @const {boolean} + */ +exports.AUTO_RETRY_DEFAULT = true; +/** + * Default behavior: Only attempt to retry retriable errors 3 times. + * + * @const {number} + */ +exports.MAX_RETRY_DEFAULT = 3; +/** + * Default behavior: Wait twice as long as previous retry before retrying. + * + * @const {number} + */ +exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; +/** + * Default behavior: If the operation doesn't succeed after 600 seconds, + * stop retrying. + * + * @const {number} + */ +exports.TOTAL_TIMEOUT_DEFAULT = 600; +/** + * Default behavior: Wait no more than 64 seconds between retries. + * + * @const {number} + */ +exports.MAX_RETRY_DELAY_DEFAULT = 64; +/** + * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. + * + * @const {enum} + * @private + */ +const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; +/** + * Returns true if the API request should be retried, given the error that was + * given the first time the request was attempted. 
+ * @const + * @param {error} err - The API error to check if it is appropriate to retry. + * @return {boolean} True if the API request should be retried, false otherwise. + */ +const RETRYABLE_ERR_FN_DEFAULT = function (err) { + var _a; + const isConnectionProblem = (reason) => { + return (reason.includes('eai_again') || // DNS lookup error + reason === 'econnreset' || + reason === 'unexpected connection closure' || + reason === 'epipe' || + reason === 'socket connection timeout'); + }; + if (err) { + if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { + return true; + } + if (typeof err.code === 'string') { + if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { + return true; + } + const reason = err.code.toLowerCase(); + if (isConnectionProblem(reason)) { + return true; + } + } + if (err.errors) { + for (const e of err.errors) { + const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); + if (reason && isConnectionProblem(reason)) { + return true; + } + } + } + } + return false; +}; +exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; +/*! Developer Documentation + * + * Invoke this method to create a new Storage object bound with pre-determined + * configuration options. For each object that can be created (e.g., a bucket), + * there is an equivalent static and instance method. While they are classes, + * they can be instantiated without use of the `new` keyword. + */ +/** + * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share objects with other + * users and allow other users to access your buckets and objects. + * + * This object provides constants to refer to the three permission levels that + * can be granted to an entity: + * + * - `gcs.acl.OWNER_ROLE` - ("OWNER") + * - `gcs.acl.READER_ROLE` - ("READER") + * - `gcs.acl.WRITER_ROLE` - ("WRITER") + * + * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} + * + * @name Storage#acl + * @type {object} + * @property {string} OWNER_ROLE + * @property {string} READER_ROLE + * @property {string} WRITER_ROLE + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * + * //- + * // Make all of the files currently in a bucket publicly readable. + * //- + * const options = { + * entity: 'allUsers', + * role: storage.acl.READER_ROLE + * }; + * + * albums.acl.add(options, function(err, aclObject) {}); + * + * //- + * // Make any new objects added to a bucket publicly readable. + * //- + * albums.acl.default.add(options, function(err, aclObject) {}); + * + * //- + * // Grant a user ownership permissions to a bucket. + * //- + * albums.acl.add({ + * entity: 'user-useremail@example.com', + * role: storage.acl.OWNER_ROLE + * }, function(err, aclObject) {}); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * albums.acl.add(options).then(function(data) { + * const aclObject = data[0]; + * const apiResponse = data[1]; + * }); + * ``` + */ +/** + * Get {@link Bucket} objects for all of the buckets in your project as + * a readable object stream. + * + * @method Storage#getBucketsStream + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @returns {ReadableStream} A readable stream that emits {@link Bucket} + * instances. 
+ * + * @example + * ``` + * storage.getBucketsStream() + * .on('error', console.error) + * .on('data', function(bucket) { + * // bucket is a Bucket object. + * }) + * .on('end', function() { + * // All buckets retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getBucketsStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + * Get {@link HmacKey} objects for all of the HMAC keys in the project in a + * readable object stream. + * + * @method Storage#getHmacKeysStream + * @param {GetHmacKeysOptions} [options] Configuration options. + * @returns {ReadableStream} A readable stream that emits {@link HmacKey} + * instances. + * + * @example + * ``` + * storage.getHmacKeysStream() + * .on('error', console.error) + * .on('data', function(hmacKey) { + * // hmacKey is an HmacKey object. + * }) + * .on('end', function() { + * // All HmacKey retrieved. + * }); + * + * //- + * // If you anticipate many results, you can end a stream early to prevent + * // unnecessary processing and API requests. + * //- + * storage.getHmacKeysStream() + * .on('data', function(bucket) { + * this.end(); + * }); + * ``` + */ +/** + *

<h4>ACLs</h4>
+ * Cloud Storage uses access control lists (ACLs) to manage object and + * bucket access. ACLs are the mechanism you use to share files with other users + * and allow other users to access your buckets and files. + * + * To learn more about ACLs, read this overview on + * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. + * + * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} + * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} + * + * @class + */ +class Storage extends index_js_1.Service { + getBucketsStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + getHmacKeysStream() { + // placeholder body, overwritten in constructor + return new stream_1.Readable(); + } + /** + * @callback Crc32cGeneratorToStringCallback + * A method returning the CRC32C as a base64-encoded string. + * + * @returns {string} + * + * @example + * Hashing the string 'data' should return 'rth90Q==' + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.toString(); // 'rth90Q==' + * ``` + **/ + /** + * @callback Crc32cGeneratorValidateCallback + * A method validating a base64-encoded CRC32C string. + * + * @param {string} [value] base64-encoded CRC32C string to validate + * @returns {boolean} + * + * @example + * Should return `true` if the value matches, `false` otherwise + * + * ```js + * const buffer = Buffer.from('data'); + * crc32c.update(buffer); + * crc32c.validate('DkjKuA=='); // false + * crc32c.validate('rth90Q=='); // true + * ``` + **/ + /** + * @callback Crc32cGeneratorUpdateCallback + * A method for passing `Buffer`s for CRC32C generation. + * + * @param {Buffer} [data] data to update CRC32C value with + * @returns {undefined} + * + * @example + * Hashing buffers from 'some ' and 'text\n' + * + * ```js + * const buffer1 = Buffer.from('some '); + * crc32c.update(buffer1); + * + * const buffer2 = Buffer.from('text\n'); + * crc32c.update(buffer2); + * + * crc32c.toString(); // 'DkjKuA==' + * ``` + **/ + /** + * @typedef {object} CRC32CValidator + * @property {Crc32cGeneratorToStringCallback} + * @property {Crc32cGeneratorValidateCallback} + * @property {Crc32cGeneratorUpdateCallback} + */ + /** + * @callback Crc32cGeneratorCallback + * @returns {CRC32CValidator} + */ + /** + * @typedef {object} StorageOptions + * @property {string} [projectId] The project ID from the Google Developer's + * Console, e.g. 'grape-spaceship-123'. We will also check the environment + * variable `GCLOUD_PROJECT` for your project ID. If your app is running + * in an environment which supports {@link + * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application + * Application Default Credentials}, your project ID will be detected + * automatically. + * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key + * downloaded from the Google Developers Console. If you provide a path to + * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and + * .p12 require you to specify the `email` option as well. + * @property {string} [email] Account email address. Required when using a .pem + * or .p12 keyFilename. + * @property {object} [credentials] Credentials object. + * @property {string} [credentials.client_email] + * @property {string} [credentials.private_key] + * @property {object} [retryOptions] Options for customizing retries. 
Retriable server errors + * will be retried with exponential delay between them dictated by the formula + * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout + * has been reached. Retries will only happen if autoRetry is set to true. + * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the + * response is related to rate limits or certain intermittent server + * errors. We will exponentially backoff subsequent requests by default. + * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to + * increase the delay time between the completion of failed requests, and the + * initiation of the subsequent retrying request. + * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from + * when the initial request is sent, after which an error will + * be returned, regardless of the retrying attempts made meanwhile. + * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. + * When this value is reached, ``retryDelayMultiplier`` will no longer be used to + * increase delay time. + * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries + * attempted before returning the error. + * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given + * error should be retried and false otherwise. + * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration + * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - + * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - + * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never + * retry a conditionally idempotent operation. + * @property {string} [userAgent] The value to be prepended to the User-Agent + * header in API requests. + * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. + * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. + * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. + * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. + * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. + * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} + */ + /** + * Constructs the Storage client. 
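The retryOptions documented above map directly onto the constructor options of the public client; a rough usage sketch (the bucket name is illustrative, and the retryableErrorFn shown simply mirrors the default status-code check from earlier in this module):

const { Storage } = require('@google-cloud/storage');

// Construct a client whose requests retry up to 5 times, backing off
// exponentially (delay grows by retryDelayMultiplier per attempt, capped at
// maxRetryDelay seconds) for at most totalTimeout seconds overall.
const storage = new Storage({
  retryOptions: {
    autoRetry: true,
    maxRetries: 5,
    retryDelayMultiplier: 2,
    maxRetryDelay: 64,
    totalTimeout: 600,
    retryableErrorFn: err =>
      [408, 429, 500, 502, 503, 504].includes(Number(err.code)),
  },
});

// Operations issued through this client inherit the options above.
const bucket = storage.bucket('my-bucket');
console.log(bucket.name); // 'my-bucket'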
+ * + * @example + * Create a client that uses Application Default Credentials + * (ADC) + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * ``` + * + * @example + * Create a client with explicit credentials + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * keyFilename: '/path/to/keyfile.json' + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by value as a JavaScript object + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: { + * type: 'service_account', + * project_id: 'xxxxxxx', + * private_key_id: 'xxxx', + * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', + * client_email: 'xxxx', + * client_id: 'xxx', + * auth_uri: 'https://accounts.google.com/o/oauth2/auth', + * token_uri: 'https://oauth2.googleapis.com/token', + * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', + * client_x509_cert_url: 'xxx', + * } + * }); + * ``` + * + * @example + * Create a client with credentials passed + * by loading a JSON file directly from disk + * ``` + * const storage = new Storage({ + * projectId: 'your-project-id', + * credentials: require('/path/to-keyfile.json') + * }); + * ``` + * + * @example + * Create a client with an `AuthClient` (e.g. `DownscopedClient`) + * ``` + * const {DownscopedClient} = require('google-auth-library'); + * const authClient = new DownscopedClient({...}); + * + * const storage = new Storage({authClient}); + * ``` + * + * Additional samples: + * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 + * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js + * + * @param {StorageOptions} [options] Configuration options. + */ + constructor(options = {}) { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const universe = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; + let apiEndpoint = `https://storage.${universe}`; + let customEndpoint = false; + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; + if (typeof EMULATOR_HOST === 'string') { + apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); + customEndpoint = true; + } + if (options.apiEndpoint && options.apiEndpoint !== apiEndpoint) { + apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); + customEndpoint = true; + } + options = Object.assign({}, options, { apiEndpoint }); + // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. + const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; + const config = { + apiEndpoint: options.apiEndpoint, + retryOptions: { + autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined + ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry + : exports.AUTO_RETRY_DEFAULT, + maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) + ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries + : exports.MAX_RETRY_DEFAULT, + retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) + ? (_f = options.retryOptions) === null || _f === void 0 ? 
void 0 : _f.retryDelayMultiplier + : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, + totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) + ? (_h = options.retryOptions) === null || _h === void 0 ? void 0 : _h.totalTimeout + : exports.TOTAL_TIMEOUT_DEFAULT, + maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) + ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay + : exports.MAX_RETRY_DELAY_DEFAULT, + retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) + ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn + : exports.RETRYABLE_ERR_FN_DEFAULT, + idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined + ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy + : IDEMPOTENCY_STRATEGY_DEFAULT, + }, + baseUrl, + customEndpoint, + useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, + projectIdRequired: false, + scopes: [ + 'https://www.googleapis.com/auth/iam', + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/devstorage.full_control', + ], + packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), + }; + super(config, options); + /** + * Reference to {@link Storage.acl}. + * + * @name Storage#acl + * @see Storage.acl + */ + this.acl = Storage.acl; + this.crc32cGenerator = + options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; + this.retryOptions = config.retryOptions; + this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); + this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); + } + static sanitizeEndpoint(url) { + if (!exports.PROTOCOL_REGEX.test(url)) { + url = `https://${url}`; + } + return url.replace(/\/+$/, ''); // Remove trailing slashes + } + /** + * Get a reference to a Cloud Storage bucket. + * + * @param {string} name Name of the bucket. + * @param {object} [options] Configuration object. + * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to + * encrypt objects inserted into this bucket, if no encryption method is + * specified. + * @param {string} [options.userProject] User project to be billed for all + * requests made from this Bucket object. + * @returns {Bucket} + * @see Bucket + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const albums = storage.bucket('albums'); + * const photos = storage.bucket('photos'); + * ``` + */ + bucket(name, options) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); + } + return new bucket_js_1.Bucket(this, name, options); + } + /** + * Reference a channel to receive notifications about changes to your bucket. + * + * @param {string} id The ID of the channel. + * @param {string} resourceId The resource ID of the channel. + * @returns {Channel} + * @see Channel + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const channel = storage.channel('id', 'resource-id'); + * ``` + */ + channel(id, resourceId) { + return new channel_js_1.Channel(this, id, resourceId); + } + /** + * @typedef {array} CreateBucketResponse + * @property {Bucket} 0 The new {@link Bucket}. 
+ * @property {object} 1 The full API response. + */ + /** + * @callback CreateBucketCallback + * @param {?Error} err Request error, if any. + * @param {Bucket} bucket The new {@link Bucket}. + * @param {object} apiResponse The full API response. + */ + /** + * Metadata to set for the bucket. + * + * @typedef {object} CreateBucketRequest + * @property {boolean} [archive=false] Specify the storage class as Archive. + * @property {object} [autoclass.enabled=false] Specify whether Autoclass is + * enabled for the bucket. + * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if + * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. + * @property {boolean} [coldline=false] Specify the storage class as Coldline. + * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. + * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [dra=false] Specify the storage class as Durable Reduced + * Availability. + * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. + * @property {object} [hierarchicalNamespace.enabled=false] Specify whether or not to enable hierarchical namespace on this bucket. + * @property {string} [location] Specify the bucket's location. If specifying + * a dual-region, the `customPlacementConfig` property should be set in conjunction. + * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. + * @property {boolean} [multiRegional=false] Specify the storage class as + * Multi-Regional. + * @property {boolean} [nearline=false] Specify the storage class as Nearline. + * @property {boolean} [regional=false] Specify the storage class as Regional. + * @property {boolean} [requesterPays=false] Force the use of the User Project metadata field to assign operational + * costs when an operation is made on a Bucket and its objects. + * @property {string} [rpo] For dual-region buckets, controls whether turbo + * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). + * @property {boolean} [standard=true] Specify the storage class as Standard. + * @property {string} [storageClass] The new storage class. (`standard`, + * `nearline`, `coldline`, or `archive`). + * **Note:** The storage classes `multi_regional`, `regional`, and + * `durable_reduced_availability` are now legacy and will be deprecated in + * the future. + * @property {Versioning} [versioning=undefined] Specify the versioning status. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + */ + /** + * Create a bucket. + * + * Cloud Storage uses a flat namespace, so you can't create a bucket with + * a name that is already in use. For more information, see + * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} + * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} + * + * @param {string} name Name of the bucket to create. + * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. 
+ * @param {CreateBucketCallback} [callback] Callback function. + * @returns {Promise} + * @throws {Error} If a name is not provided. + * @see Bucket#create + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const callback = function(err, bucket, apiResponse) { + * // `bucket` is a Bucket object. + * }; + * + * storage.createBucket('new-bucket', callback); + * + * //- + * // Create a bucket in a specific location and region. See the + * // Official JSON API docs for complete details on the `location` + * option. + * // + * //- + * const metadata = { + * location: 'US-CENTRAL1', + * regional: true + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Create a bucket with a retention policy of 6 months. + * //- + * const metadata = { + * retentionPolicy: { + * retentionPeriod: 15780000 // 6 months in seconds. + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // Enable versioning on a new bucket. + * //- + * const metadata = { + * versioning: { + * enabled: true + * } + * }; + * + * storage.createBucket('new-bucket', metadata, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createBucket('new-bucket').then(function(data) { + * const bucket = data[0]; + * const apiResponse = data[1]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_create_bucket + * Another example: + */ + createBucket(name, metadataOrCallback, callback) { + if (!name) { + throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); + } + let metadata; + if (!callback) { + callback = metadataOrCallback; + metadata = {}; + } + else { + metadata = metadataOrCallback; + } + const body = { + ...metadata, + name, + }; + const storageClasses = { + archive: 'ARCHIVE', + coldline: 'COLDLINE', + dra: 'DURABLE_REDUCED_AVAILABILITY', + multiRegional: 'MULTI_REGIONAL', + nearline: 'NEARLINE', + regional: 'REGIONAL', + standard: 'STANDARD', + }; + const storageClassKeys = Object.keys(storageClasses); + for (const storageClass of storageClassKeys) { + if (body[storageClass]) { + if (metadata.storageClass && metadata.storageClass !== storageClass) { + throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); + } + body.storageClass = storageClasses[storageClass]; + delete body[storageClass]; + } + } + if (body.requesterPays) { + body.billing = { + requesterPays: body.requesterPays, + }; + delete body.requesterPays; + } + const query = { + project: this.projectId, + }; + if (body.userProject) { + query.userProject = body.userProject; + delete body.userProject; + } + if (body.enableObjectRetention) { + query.enableObjectRetention = body.enableObjectRetention; + delete body.enableObjectRetention; + } + if (body.predefinedAcl) { + query.predefinedAcl = body.predefinedAcl; + delete body.predefinedAcl; + } + if (body.predefinedDefaultObjectAcl) { + query.predefinedDefaultObjectAcl = body.predefinedDefaultObjectAcl; + delete body.predefinedDefaultObjectAcl; + } + if (body.projection) { + query.projection = body.projection; + delete body.projection; + } + this.request({ + method: 'POST', + uri: '/b', + qs: query, + json: body, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const bucket = this.bucket(name); + bucket.metadata = resp; + callback(null, bucket, resp); + }); + } + /** + * @typedef {object} CreateHmacKeyOptions + * @property {string} 
[projectId] The project ID of the project that owns + * the service account of the requested HMAC key. If not provided, + * the project ID used to instantiate the Storage client will be used. + * @property {string} [userProject] This parameter is currently ignored. + */ + /** + * @typedef {object} HmacKeyMetadata + * @property {string} accessId The access id identifies which HMAC key was + * used to sign a request when authenticating with HMAC. + * @property {string} etag Used to perform a read-modify-write of the key. + * @property {string} id The resource name of the HMAC key. + * @property {string} projectId The project ID. + * @property {string} serviceAccountEmail The service account's email this + * HMAC key is created for. + * @property {string} state The state of this HMAC key. One of "ACTIVE", + * "INACTIVE" or "DELETED". + * @property {string} timeCreated The creation time of the HMAC key in + * RFC 3339 format. + * @property {string} [updated] The time this HMAC key was last updated in + * RFC 3339 format. + */ + /** + * @typedef {array} CreateHmacKeyResponse + * @property {HmacKey} 0 The HmacKey instance created from API response. + * @property {string} 1 The HMAC key's secret used to access the XML API. + * @property {object} 2 The raw API response. + */ + /** + * @callback CreateHmacKeyCallback Callback function. + * @param {?Error} err Request error, if any. + * @param {HmacKey} hmacKey The HmacKey instance created from API response. + * @param {string} secret The HMAC key's secret used to access the XML API. + * @param {object} apiResponse The raw API response. + */ + /** + * Create an HMAC key associated with a service account to authenticate + * requests to the Cloud Storage XML API. + * + * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} + * + * @param {string} serviceAccountEmail The service account's email address + * for which the HMAC key is created. + * @param {CreateHmacKeyCallback} [callback] Callback function. + * @return {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * // Replace with your service account's email address + * const serviceAccountEmail = + * 'my-service-account@appspot.gserviceaccount.com'; + * + * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { + * if (!err) { + * // Securely store the secret for use with the XML API. + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.createHmacKey(serviceAccountEmail) + * .then((response) => { + * const hmacKey = response[0]; + * const secret = response[1]; + * // Securely store the secret for use with the XML API.
+ * }); + * ``` + */ + createHmacKey(serviceAccountEmail, optionsOrCb, cb) { + if (typeof serviceAccountEmail !== 'string') { + throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); + } + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options, { serviceAccountEmail }); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + method: 'POST', + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + maxRetries: 0, //explicitly set this value since this is a non-idempotent function + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const metadata = resp.metadata; + const hmacKey = this.hmacKey(metadata.accessId, { + projectId: metadata.projectId, + }); + hmacKey.metadata = resp.metadata; + callback(null, hmacKey, resp.secret, resp); + }); + } + /** + * Query object for listing buckets. + * + * @typedef {object} GetBucketsRequest + * @property {boolean} [autoPaginate=true] Have pagination handled + * automatically. + * @property {number} [maxApiCalls] Maximum number of API calls to make. + * @property {number} [maxResults] Maximum number of items plus prefixes to + * return per call. + * Note: By default will handle pagination automatically + * if more than 1 page worth of results are requested per call. + * When `autoPaginate` is set to `false` the smaller of `maxResults` + * or 1 page of results will be returned per call. + * @property {string} [pageToken] A previously-returned page token + * representing part of the larger set of results to view. + * @property {string} [userProject] The ID of the project which will be billed + * for the request. + * @param {boolean} [softDeleted] If true, returns the soft-deleted object. + * Object `generation` is required if `softDeleted` is set to True. + */ + /** + * @typedef {array} GetBucketsResponse + * @property {Bucket[]} 0 Array of {@link Bucket} instances. + * @property {object} 1 nextQuery A query object to receive more results. + * @property {object} 2 The full API response. + */ + /** + * @callback GetBucketsCallback + * @param {?Error} err Request error, if any. + * @param {Bucket[]} buckets Array of {@link Bucket} instances. + * @param {object} nextQuery A query object to receive more results. + * @param {object} apiResponse The full API response. + */ + /** + * Get Bucket objects for all of the buckets in your project. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} + * + * @param {GetBucketsRequest} [query] Query object for listing buckets. + * @param {GetBucketsCallback} [callback] Callback function. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * storage.getBuckets(function(err, buckets) { + * if (!err) { + * // buckets is an array of Bucket objects. + * } + * }); + * + * //- + * // To control how many API requests are made and page through the results + * // manually, set `autoPaginate` to `false`. + * //- + * const callback = function(err, buckets, nextQuery, apiResponse) { + * if (nextQuery) { + * // More results exist. + * storage.getBuckets(nextQuery, callback); + * } + * + * // The `metadata` property is populated for you with the metadata at the + * // time of fetching. + * buckets[0].metadata; + * + * // However, in cases where you are concerned the metadata could have + * // changed, use the `getMetadata` method. 
+ * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); + * }; + * + * storage.getBuckets({ + * autoPaginate: false + * }, callback); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getBuckets().then(function(data) { + * const buckets = data[0]; + * }); + * + * ``` + * @example include:samples/buckets.js + * region_tag:storage_list_buckets + * Another example: + */ + getBuckets(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + options.project = options.project || this.projectId; + this.request({ + uri: '/b', + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const buckets = itemsArray.map((bucket) => { + const bucketInstance = this.bucket(bucket.id); + bucketInstance.metadata = bucket; + return bucketInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, buckets, nextQuery, resp); + }); + } + getHmacKeys(optionsOrCb, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); + const query = Object.assign({}, options); + const projectId = query.projectId || this.projectId; + delete query.projectId; + this.request({ + uri: `/projects/${projectId}/hmacKeys`, + qs: query, + }, (err, resp) => { + if (err) { + callback(err, null, null, resp); + return; + } + const itemsArray = resp.items ? resp.items : []; + const hmacKeys = itemsArray.map((hmacKey) => { + const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { + projectId: hmacKey.projectId, + }); + hmacKeyInstance.metadata = hmacKey; + return hmacKeyInstance; + }); + const nextQuery = resp.nextPageToken + ? Object.assign({}, options, { pageToken: resp.nextPageToken }) + : null; + callback(null, hmacKeys, nextQuery, resp); + }); + } + /** + * @typedef {array} GetServiceAccountResponse + * @property {object} 0 The service account resource. + * @property {object} 1 The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * @callback GetServiceAccountCallback + * @param {?Error} err Request error, if any. + * @param {object} serviceAccount The serviceAccount resource. + * @param {string} serviceAccount.emailAddress The service account email + * address. + * @param {object} apiResponse The full + * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. + */ + /** + * Get the email address of this project's Google Cloud Storage service + * account. + * + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} + * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} + * + * @param {object} [options] Configuration object. + * @param {string} [options.userProject] User project to be billed for this + * request. + * @param {GetServiceAccountCallback} [callback] Callback function. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * + * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { + * if (!err) { + * const serviceAccountEmail = serviceAccount.emailAddress; + * } + * }); + * + * //- + * // If the callback is omitted, we'll return a Promise. + * //- + * storage.getServiceAccount().then(function(data) { + * const serviceAccountEmail = data[0].emailAddress; + * const apiResponse = data[1]; + * }); + * ``` + */ + getServiceAccount(optionsOrCallback, cb) { + const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); + this.request({ + uri: `/projects/${this.projectId}/serviceAccount`, + qs: options, + }, (err, resp) => { + if (err) { + callback(err, null, resp); + return; + } + const camelCaseResponse = {}; + for (const prop in resp) { + // eslint-disable-next-line no-prototype-builtins + if (resp.hasOwnProperty(prop)) { + const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); + camelCaseResponse[camelCaseProp] = resp[prop]; + } + } + callback(null, camelCaseResponse, resp); + }); + } + /** + * Get a reference to an HmacKey object. + * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to + * retrieve and populate the metadata. + * + * To get a reference to an HMAC key that's not created for a service + * account in the same project used to instantiate the Storage client, + * supply the project's ID as `projectId` in the `options` argument. + * + * @param {string} accessId The HMAC key's access ID. + * @param {HmacKeyOptions} options HmacKey constructor options. + * @returns {HmacKey} + * @see HmacKey + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const hmacKey = storage.hmacKey('ACCESS_ID'); + * ``` + */ + hmacKey(accessId, options) { + if (!accessId) { + throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); + } + return new hmacKey_js_1.HmacKey(this, accessId, options); + } +} +exports.Storage = Storage; +/** + * {@link Bucket} class. + * + * @name Storage.Bucket + * @see Bucket + * @type {Constructor} + */ +Storage.Bucket = bucket_js_1.Bucket; +/** + * {@link Channel} class. + * + * @name Storage.Channel + * @see Channel + * @type {Constructor} + */ +Storage.Channel = channel_js_1.Channel; +/** + * {@link File} class. + * + * @name Storage.File + * @see File + * @type {Constructor} + */ +Storage.File = file_js_1.File; +/** + * {@link HmacKey} class. + * + * @name Storage.HmacKey + * @see HmacKey + * @type {Constructor} + */ +Storage.HmacKey = hmacKey_js_1.HmacKey; +Storage.acl = { + OWNER_ROLE: 'OWNER', + READER_ROLE: 'READER', + WRITER_ROLE: 'WRITER', +}; +/*! Developer Documentation + * + * These methods can be auto-paginated. + */ +paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); +/*! Developer Documentation + * + * All async methods (except for streams) will return a Promise in the event + * that a callback is omitted. + */ +(0, promisify_1.promisifyAll)(Storage, { + exclude: ['bucket', 'channel', 'hmacKey'], +}); + + +/***/ }), + +/***/ 4: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/*! + * Copyright 2022 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.TransferManager = exports.MultiPartUploadError = void 0; +const file_js_1 = __nccwpck_require__(9975); +const p_limit_1 = __importDefault(__nccwpck_require__(9977)); +const path = __importStar(__nccwpck_require__(6928)); +const fs_1 = __nccwpck_require__(9896); +const crc32c_js_1 = __nccwpck_require__(4317); +const google_auth_library_1 = __nccwpck_require__(492); +const fast_xml_parser_1 = __nccwpck_require__(9741); +const async_retry_1 = __importDefault(__nccwpck_require__(5195)); +const crypto_1 = __nccwpck_require__(6982); +const util_js_1 = __nccwpck_require__(7325); +const util_js_2 = __nccwpck_require__(4557); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); +/** + * Default number of concurrently executing promises to use when calling uploadManyFiles. 
+ * + */ +const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadManyFiles. + * + */ +const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; +/** + * Default number of concurrently executing promises to use when calling downloadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; +/** + * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling downloadFileInChunks. + * + */ +const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * The chunk size in bytes to use when calling uploadFileInChunks. + * + */ +const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; +/** + * Default number of concurrently executing promises to use when calling uploadFileInChunks. + * + */ +const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; +const EMPTY_REGEX = '(?:)'; +/** + * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. + * Example: `gccl-gcs-cmd/tm.upload_many` + * + * @see {@link GCCL_GCS_CMD}. + * @see {@link GCCL_GCS_CMD_KEY}. + */ +const GCCL_GCS_CMD_FEATURE = { + UPLOAD_MANY: 'tm.upload_many', + DOWNLOAD_MANY: 'tm.download_many', + UPLOAD_SHARDED: 'tm.upload_sharded', + DOWNLOAD_SHARDED: 'tm.download_sharded', +}; +const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { + return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); +}; +class MultiPartUploadError extends Error { + constructor(message, uploadId, partsMap) { + super(message); + this.uploadId = uploadId; + this.partsMap = partsMap; + } +} +exports.MultiPartUploadError = MultiPartUploadError; +/** + * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. + * + * @private + * + */ +class XMLMultiPartUploadHelper { + constructor(bucket, fileName, uploadId, partsMap) { + _XMLMultiPartUploadHelper_instances.add(this); + this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); + this.uploadId = uploadId || ''; + this.bucket = bucket; + this.fileName = fileName; + this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; + this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); + this.xmlParser = new fast_xml_parser_1.XMLParser(); + this.partsMap = partsMap || new Map(); + this.retryOptions = { + retries: this.bucket.storage.retryOptions.maxRetries, + factor: this.bucket.storage.retryOptions.retryDelayMultiplier, + maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, + maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, + }; + } + /** + * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. 
+ * + * @returns {Promise} + */ + async initiateUpload(headers = {}) { + const url = `${this.baseUrl}?uploads`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), + method: 'POST', + url, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + const parsedXML = this.xmlParser.parse(res.data); + this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Uploads the provided chunk of data to the XML API using the previously created upload id. + * + * @param {number} partNumber the sequence number of this chunk. + * @param {Buffer} chunk the chunk of data to be uploaded. + * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server + * to validate the chunk was not corrupted. + * @returns {Promise} + */ + async uploadPart(partNumber, chunk, validation) { + const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; + let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); + if (validation === 'md5') { + const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); + headers = { + 'Content-MD5': hash, + }; + } + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'PUT', + body: chunk, + headers, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + this.partsMap.set(partNumber, res.headers['etag']); + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + } + }, this.retryOptions); + } + /** + * Sends the final request of the MPU to tell GCS the upload is now complete. + * + * @returns {Promise} + */ + async completeUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); + const parts = []; + for (const entry of sortedMap.entries()) { + parts.push({ PartNumber: entry[0], ETag: entry[1] }); + } + const body = `${this.xmlBuilder.build(parts)}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), + url, + method: 'POST', + body, + }); + if (res.data && res.data.error) { + throw res.data.error; + } + return res; + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } + /** + * Aborts an multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, + * and future requests using the upload ID fail. 
+ * + * @returns {Promise} + */ + async abortUpload() { + const url = `${this.baseUrl}?uploadId=${this.uploadId}`; + return (0, async_retry_1.default)(async (bail) => { + try { + const res = await this.authClient.request({ + url, + method: 'DELETE', + }); + if (res.data && res.data.error) { + throw res.data.error; + } + } + catch (e) { + __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); + return; + } + }, this.retryOptions); + } +} +_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { + let headerFound = false; + let userAgentFound = false; + for (const [key, value] of Object.entries(headers)) { + if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { + headerFound = true; + // Prepend command feature to value, if not already there + if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { + headers[key] = + `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + } + else if (key.toLocaleLowerCase().trim() === 'user-agent') { + userAgentFound = true; + } + } + // If the header isn't present, add it + if (!headerFound) { + headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; + } + // If the User-Agent isn't present, add it + if (!userAgentFound) { + headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); + } + return headers; +}, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { + if (this.bucket.storage.retryOptions.autoRetry && + this.bucket.storage.retryOptions.retryableErrorFn(err)) { + throw err; + } + else { + bail(err); + } +}; +/** + * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. + * + * @class + * @hideconstructor + * + * @param {Bucket} bucket A {@link Bucket} instance + * + */ +class TransferManager { + constructor(bucket) { + this.bucket = bucket; + } + /** + * @typedef {object} UploadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the files. + * @property {Function} [customDestinationBuilder] A fuction that will take the current path of a local file + * and return a string representing a custom path to be used to upload the file to GCS. + * @property {boolean} [skipIfExists] Do not upload the file if it already exists in + * the bucket. This will set the precondition ifGenerationMatch = 0. + * @property {string} [prefix] A prefix to append to all of the uploaded files. + * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through + * to each individual upload operation. + * + */ + /** + * Upload multiple files in parallel to the bucket. This is a convenience method + * that utilizes {@link Bucket#upload} to perform the upload. + * + * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files or a directory name. + * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. + * to be uploaded to the bucket + * @param {UploadManyFilesOptions} [options] Configuration options. 
+ * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload multiple files in parallel. + * //- + * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt, 'local/path/file2.txt']); + * // Your bucket now contains: + * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') + * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') + * const response = await transferManager.uploadManyFiles('/local/directory'); + * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. + * ``` + * + */ + async uploadManyFiles(filePathsOrDirectory, options = {}) { + var _a; + if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { + options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; + } + else if (options.skipIfExists && + options.passthroughOptions === undefined) { + options.passthroughOptions = { + preconditionOpts: { + ifGenerationMatch: 0, + }, + }; + } + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); + const promises = []; + let allPaths = []; + if (!Array.isArray(filePathsOrDirectory)) { + for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { + allPaths.push(curPath); + } + } + else { + allPaths = filePathsOrDirectory; + } + for (const filePath of allPaths) { + const stat = await fs_1.promises.lstat(filePath); + if (stat.isDirectory()) { + continue; + } + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, + }; + passThroughOptionsCopy.destination = options.customDestinationBuilder + ? options.customDestinationBuilder(filePath, options) + : filePath.split(path.sep).join(path.posix.sep); + if (options.prefix) { + passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); + } + promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadManyFilesOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the files. + * @property {string} [prefix] A prefix to append to all of the downloaded files. + * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. + * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through + * to each individual download operation. + * @property {boolean} [skipIfExists] Do not download the file if it already exists in + * the destination. + * + */ + /** + * Download multiple files in parallel to the local filesystem. This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If + * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. + * @param {DownloadManyFilesOptions} [options] Configuration options. 
Setting options.prefix or options.stripPrefix + * or options.passthroughOptions.destination will cause the downloaded files to be written to the file system + * instead of being returned as a buffer. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download multiple files in parallel. + * //- + * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles([bucket.File('file1.txt'), bucket.File('file2.txt')]); + * // The following files have been downloaded: + * // - "file1.txt" (with the contents from my-bucket.file1.txt) + * // - "file2.txt" (with the contents from my-bucket.file2.txt) + * const response = await transferManager.downloadManyFiles('test-folder'); + * // All files with GCS prefix of 'test-folder' have been downloaded. + * ``` + * + */ + async downloadManyFiles(filesOrFolder, options = {}) { + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); + const promises = []; + let files = []; + if (!Array.isArray(filesOrFolder)) { + const directoryFiles = await this.bucket.getFiles({ + prefix: filesOrFolder, + }); + files = directoryFiles[0]; + } + else { + files = filesOrFolder.map(curFile => { + if (typeof curFile === 'string') { + return this.bucket.file(curFile); + } + return curFile; + }); + } + const stripRegexString = options.stripPrefix + ? `^${options.stripPrefix}` + : EMPTY_REGEX; + const regex = new RegExp(stripRegexString, 'g'); + for (const file of files) { + const passThroughOptionsCopy = { + ...options.passthroughOptions, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, + }; + if (options.prefix || passThroughOptionsCopy.destination) { + passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); + } + if (options.stripPrefix) { + passThroughOptionsCopy.destination = file.name.replace(regex, ''); + } + if (options.skipIfExists && + (0, fs_1.existsSync)(passThroughOptionsCopy.destination || '')) { + continue; + } + promises.push(limit(async () => { + const destination = passThroughOptionsCopy.destination; + if (destination && destination.endsWith(path.sep)) { + await fs_1.promises.mkdir(destination, { recursive: true }); + return Promise.resolve([ + Buffer.alloc(0), + ]); + } + return file.download(passThroughOptionsCopy); + })); + } + return Promise.all(promises); + } + /** + * @typedef {object} DownloadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when downloading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded. + * @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. + * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. + * + */ + /** + * Download a large file in chunks utilizing parallel download operations. 
This is a convenience method + * that utilizes {@link File#download} to perform the download. + * + * @param {File | string} fileOrName {@link File} to download. + * @param {DownloadFileInChunksOptions} [options] Configuration options. + * @returns {Promise} + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Download a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'); + * // Your local directory now contains: + * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) + * ``` + * + */ + async downloadFileInChunks(fileOrName, options = {}) { + let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); + const noReturnData = Boolean(options.noReturnData); + const promises = []; + const file = typeof fileOrName === 'string' + ? this.bucket.file(fileOrName) + : fileOrName; + const fileInfo = await file.get(); + const size = parseInt(fileInfo[0].metadata.size.toString()); + // If the file size does not meet the threshold download it as a single chunk. + if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { + limit = (0, p_limit_1.default)(1); + chunkSize = size; + } + let start = 0; + const filePath = options.destination || path.basename(file.name); + const fileToWrite = await fs_1.promises.open(filePath, 'w'); + while (start < size) { + const chunkStart = start; + let chunkEnd = start + chunkSize - 1; + chunkEnd = chunkEnd > size ? size : chunkEnd; + promises.push(limit(async () => { + const resp = await file.download({ + start: chunkStart, + end: chunkEnd, + [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, + }); + const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); + if (noReturnData) + return; + return result.buffer; + })); + start += chunkSize; + } + let chunks; + try { + chunks = await Promise.all(promises); + } + finally { + await fileToWrite.close(); + } + if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { + const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); + if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { + const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); + mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; + throw mismatchError; + } + } + if (noReturnData) + return; + return [Buffer.concat(chunks, size)]; + } + /** + * @typedef {object} UploadFileInChunksOptions + * @property {number} [concurrencyLimit] The number of concurrently executing promises + * to use when uploading the file. + * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. + * @property {string} [uploadName] Name of the file when saving to GCS. If ommitted the name is taken from the file path. + * @property {number} [maxQueueSize] The number of chunks to be uploaded to hold in memory concurrently. If not specified + * defaults to the specified concurrency limit. + * @property {string} [uploadId] If specified attempts to resume a previous upload. 
+ * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk + * specified in partsMap + * @property {object} [headers] headers to be sent when initiating the multipart upload. + * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} + * @property {boolean} [autoAbortFailure] boolean to indicate if an in progress upload session will be automatically aborted upon failure. If not set, + * failures will be automatically aborted. + * + */ + /** + * Upload a large file in chunks utilizing parallel upload operations. If the upload fails, an uploadId and + * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to + * resume the upload. + * + * @param {string} [filePath] The path of the file to be uploaded + * @param {UploadFileInChunksOptions} [options] Configuration options. + * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. + * @returns {Promise} If successful a promise resolving to void, otherwise an error containing the message, uploadId, and parts map. + * + * @example + * ``` + * const {Storage} = require('@google-cloud/storage'); + * const storage = new Storage(); + * const bucket = storage.bucket('my-bucket'); + * const transferManager = new TransferManager(bucket); + * + * //- + * // Upload a large file in chunks utilizing parallel operations. + * //- + * const response = await transferManager.uploadFileInChunks('large-file.txt'); + * // Your bucket now contains: + * // - "large-file.txt" + * ``` + * + * + */ + async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { + const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; + const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); + const maxQueueSize = options.maxQueueSize || + options.concurrencyLimit || + DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; + const fileName = options.uploadName || path.basename(filePath); + const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); + let partNumber = 1; + let promises = []; + try { + if (options.uploadId === undefined) { + await mpuHelper.initiateUpload(options.headers); + } + const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; + const readStream = (0, fs_1.createReadStream)(filePath, { + highWaterMark: chunkSize, + start: startOrResumptionByte, + }); + // p-limit only limits the number of running promises. We do not want to hold an entire + // large file in memory at once so promises acts as a queue that will hold only maxQueueSize in memory.
+ for await (const curChunk of readStream) { + if (promises.length >= maxQueueSize) { + await Promise.all(promises); + promises = []; + } + promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); + } + await Promise.all(promises); + return await mpuHelper.completeUpload(); + } + catch (e) { + if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && + mpuHelper.uploadId) { + try { + await mpuHelper.abortUpload(); + return; + } + catch (e) { + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); + } + } + async *getPathsFromDirectory(directory) { + const filesAndSubdirectories = await fs_1.promises.readdir(directory, { + withFileTypes: true, + }); + for (const curFileOrDirectory of filesAndSubdirectories) { + const fullPath = path.join(directory, curFileOrDirectory.name); + curFileOrDirectory.isDirectory() + ? yield* this.getPathsFromDirectory(fullPath) + : yield fullPath; + } + } +} +exports.TransferManager = TransferManager; + + +/***/ }), + +/***/ 4557: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { + enumerable: true, + get: function () { + return m[k]; + } + }; + } + Object.defineProperty(o, k2, desc); +} : function (o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); +var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { + Object.defineProperty(o, "default", { + enumerable: true, + value: v + }); +} : function (o, v) { + o["default"] = v; +}); +var __importStar = this && this.__importStar || function () { + var ownKeys = function (o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +}(); +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.PassThroughShim = void 0; +exports.normalize = normalize; +exports.objectEntries = objectEntries; +exports.fixedEncodeURIComponent = fixedEncodeURIComponent; +exports.encodeURI = encodeURI; +exports.qsStringify = qsStringify; +exports.objectKeyToLowercase = objectKeyToLowercase; +exports.unicodeJSONStringify = unicodeJSONStringify; +exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; +exports.formatAsUTCISO = formatAsUTCISO; +exports.getRuntimeTrackingString = getRuntimeTrackingString; +exports.getUserAgentString = getUserAgentString; +exports.getDirName = getDirName; +exports.getModuleFormat = getModuleFormat; +const path = __importStar(__nccwpck_require__(6928)); +const querystring = __importStar(__nccwpck_require__(3480)); +const stream_1 = __nccwpck_require__(2203); +const url = __importStar(__nccwpck_require__(7016)); +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +const package_json_helper_cjs_1 = __nccwpck_require__(1969); +// Done to avoid a problem with mangling of identifiers when using esModuleInterop +const fileURLToPath = url.fileURLToPath; +const isEsm = false; +function normalize(optionsOrCallback, cb) { + const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; + const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; + return { + options, + callback + }; +} +/** + * Flatten an object into an Array of arrays, [[key, value], ..]. + * Implements Object.entries() for Node.js <8 + * @internal + */ +function objectEntries(obj) { + return Object.keys(obj).map(key => [key, obj[key]]); +} +/** + * Encode `str` with encodeURIComponent, plus these + * reserved characters: `! * ' ( )`. + * + * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} + * + * @param {string} str The URI component to encode. + * @return {string} The encoded string. + */ +function fixedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); +} +/** + * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. + * + * Encode every byte except `A-Z a-Z 0-9 ~ - . _`. + * + * @param {string} uri The URI to encode. + * @param [boolean=false] encodeSlash If `true`, the "/" character is not encoded. + * @return {string} The encoded string. + */ +function encodeURI(uri, encodeSlash) { + // Split the string by `/`, and conditionally rejoin them with either + // %2F if encodeSlash is `true`, or '/' if `false`. + return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? 
'%2F' : '/'); +} +/** + * Serialize an object to a URL query string using util.encodeURI(uri, true). + * @param {object} qs The object to serialize. + * @return {string} Serialized string. + */ +function qsStringify(qs) { + return querystring.stringify(qs, '&', '=', { + encodeURIComponent: component => encodeURI(component, true) + }); +} +function objectKeyToLowercase(object) { + const newObj = {}; + for (let key of Object.keys(object)) { + const value = object[key]; + key = key.toLowerCase(); + newObj[key] = value; + } + return newObj; +} +/** + * JSON encode str, with unicode \u+ representation. + * @param {object} obj The object to encode. + * @return {string} Serialized string. + */ +function unicodeJSONStringify(obj) { + return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); +} +/** + * Converts the given object's keys to snake_case + * @param {object} obj object to convert keys to snake case. + * @returns {object} object with keys converted to snake case. + */ +function convertObjKeysToSnakeCase(obj) { + if (obj instanceof Date || obj instanceof RegExp) { + return obj; + } + if (Array.isArray(obj)) { + return obj.map(convertObjKeysToSnakeCase); + } + if (obj instanceof Object) { + return Object.keys(obj).reduce((acc, cur) => { + const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { + return `_${p1.toLowerCase()}`; + }); + acc[s] = convertObjKeysToSnakeCase(obj[cur]); + return acc; + }, Object()); + } + return obj; +} +/** + * Formats the provided date object as a UTC ISO string. + * @param {Date} dateTimeToFormat date object to be formatted. + * @param {boolean} includeTime flag to include hours, minutes, seconds in output. + * @param {string} dateDelimiter delimiter between date components. + * @param {string} timeDelimiter delimiter between time components. + * @returns {string} UTC ISO format of provided date object. + */ +function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { + const year = dateTimeToFormat.getUTCFullYear(); + const month = dateTimeToFormat.getUTCMonth() + 1; + const day = dateTimeToFormat.getUTCDate(); + const hour = dateTimeToFormat.getUTCHours(); + const minute = dateTimeToFormat.getUTCMinutes(); + const second = dateTimeToFormat.getUTCSeconds(); + let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; + if (includeTime) { + resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; + } + return resultString; +} +/** + * Examines the runtime environment and returns the appropriate tracking string. + * @returns {string} metrics tracking string based on the current runtime environment.
+ */ +function getRuntimeTrackingString() { + if ( + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version && + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + globalThis.Deno.version.deno) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return `gl-deno/${globalThis.Deno.version.deno}`; + } else { + return `gl-node/${process.versions.node}`; + } +} +/** + * Looks at package.json and creates the user-agent string to be applied to request headers. + * @returns {string} user agent string. + */ +function getUserAgentString() { + const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); + const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. + .replace('/', '-'); // For UA spec-compliance purposes. + return hyphenatedPackageName + '/' + pkg.version; +} +function getDirName() { + let dirToUse = ''; + try { + dirToUse = __dirname; + } catch (e) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + dirToUse = __dirname; + } + return dirToUse; +} +function getModuleFormat() { + return isEsm ? 'ESM' : 'CJS'; +} +class PassThroughShim extends stream_1.PassThrough { + constructor() { + super(...arguments); + this.shouldEmitReading = true; + this.shouldEmitWriting = true; + } + _read(size) { + if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + super._read(size); + } + _write(chunk, encoding, callback) { + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + // Per the nodejs documention, callback must be invoked on the next tick + process.nextTick(() => { + super._write(chunk, encoding, callback); + }); + } + _final(callback) { + // If the stream is empty (i.e. empty file) final will be invoked before _read / _write + // and we should still emit the proper events. 
+ if (this.shouldEmitReading) { + this.emit('reading'); + this.shouldEmitReading = false; + } + if (this.shouldEmitWriting) { + this.emit('writing'); + this.shouldEmitWriting = false; + } + callback(null); + } +} +exports.PassThroughShim = PassThroughShim; + + +/***/ }), + +/***/ 3660: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encode = encode; +exports.decodeEntity = decodeEntity; +exports.decode = decode; +var named_references_js_1 = __nccwpck_require__(6000); +var numeric_unicode_map_js_1 = __nccwpck_require__(1057); +var surrogate_pairs_js_1 = __nccwpck_require__(9718); +var allNamedReferences = __assign(__assign({}, named_references_js_1.namedReferences), { all: named_references_js_1.namedReferences.html5 }); +var encodeRegExps = { + specialChars: /[<>'"&]/g, + nonAscii: /[<>'"&\u0080-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintable: /[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + nonAsciiPrintableOnly: /[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, + extensive: /[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g +}; +var defaultEncodeOptions = { + mode: 'specialChars', + level: 'all', + numeric: 'decimal' +}; +/** Encodes all the necessary (specified by `level`) characters in the text */ +function encode(text, _a) { + var _b = _a === void 0 ? defaultEncodeOptions : _a, _c = _b.mode, mode = _c === void 0 ? 'specialChars' : _c, _d = _b.numeric, numeric = _d === void 0 ? 'decimal' : _d, _e = _b.level, level = _e === void 0 ? 'all' : _e; + if (!text) { + return ''; + } + var encodeRegExp = encodeRegExps[mode]; + var references = allNamedReferences[level].characters; + var isHex = numeric === 'hexadecimal'; + return String.prototype.replace.call(text, encodeRegExp, function (input) { + var result = references[input]; + if (!result) { + var code = input.length > 1 ? (0, surrogate_pairs_js_1.getCodePoint)(input, 0) : input.charCodeAt(0); + result = (isHex ? 
'&#x' + code.toString(16) : '&#' + code) + ';'; + } + return result; + }); +} +var defaultDecodeOptions = { + scope: 'body', + level: 'all' +}; +var strict = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g; +var attribute = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g; +var baseDecodeRegExps = { + xml: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.xml + }, + html4: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html4 + }, + html5: { + strict: strict, + attribute: attribute, + body: named_references_js_1.bodyRegExps.html5 + } +}; +var decodeRegExps = __assign(__assign({}, baseDecodeRegExps), { all: baseDecodeRegExps.html5 }); +var fromCharCode = String.fromCharCode; +var outOfBoundsChar = fromCharCode(65533); +var defaultDecodeEntityOptions = { + level: 'all' +}; +function getDecodedEntity(entity, references, isAttribute, isStrict) { + var decodeResult = entity; + var decodeEntityLastChar = entity[entity.length - 1]; + if (isAttribute && decodeEntityLastChar === '=') { + decodeResult = entity; + } + else if (isStrict && decodeEntityLastChar !== ';') { + decodeResult = entity; + } + else { + var decodeResultByReference = references[entity]; + if (decodeResultByReference) { + decodeResult = decodeResultByReference; + } + else if (entity[0] === '&' && entity[1] === '#') { + var decodeSecondChar = entity[2]; + var decodeCode = decodeSecondChar == 'x' || decodeSecondChar == 'X' + ? parseInt(entity.substr(3), 16) + : parseInt(entity.substr(2)); + decodeResult = + decodeCode >= 0x10ffff + ? outOfBoundsChar + : decodeCode > 65535 + ? (0, surrogate_pairs_js_1.fromCodePoint)(decodeCode) + : fromCharCode(numeric_unicode_map_js_1.numericUnicodeMap[decodeCode] || decodeCode); + } + } + return decodeResult; +} +/** Decodes a single entity */ +function decodeEntity(entity, _a) { + var _b = _a === void 0 ? defaultDecodeEntityOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c; + if (!entity) { + return ''; + } + return getDecodedEntity(entity, allNamedReferences[level].entities, false, false); +} +/** Decodes all entities in the text */ +function decode(text, _a) { + var _b = _a === void 0 ? defaultDecodeOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c, _d = _b.scope, scope = _d === void 0 ? level === 'xml' ? 
'strict' : 'body' : _d; + if (!text) { + return ''; + } + var decodeRegExp = decodeRegExps[level][scope]; + var references = allNamedReferences[level].entities; + var isAttribute = scope === 'attribute'; + var isStrict = scope === 'strict'; + return text.replace(decodeRegExp, function (entity) { return getDecodedEntity(entity, references, isAttribute, isStrict); }); +} +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6000: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.namedReferences = exports.bodyRegExps = void 0; +// This file is autogenerated by tools/process-named-references.ts +var pairDivider = "~"; +var blockDivider = "~~"; +function generateNamedReferences(input, prev) { + var entities = {}; + var characters = {}; + var blocks = input.split(blockDivider); + var isOptionalBlock = false; + for (var i = 0; blocks.length > i; i++) { + var entries = blocks[i].split(pairDivider); + for (var j = 0; j < entries.length; j += 2) { + var entity = entries[j]; + var character = entries[j + 1]; + var fullEntity = '&' + entity + ';'; + entities[fullEntity] = character; + if (isOptionalBlock) { + entities['&' + entity] = character; + } + characters[character] = fullEntity; + } + isOptionalBlock = true; + } + return prev ? + { entities: __assign(__assign({}, entities), prev.entities), characters: __assign(__assign({}, characters), prev.characters) } : + { entities: entities, characters: characters }; +} +exports.bodyRegExps = { + xml: /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html4: /∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, + html5: /·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g +}; +exports.namedReferences = {}; +exports.namedReferences.xml = generateNamedReferences("lt~<~gt~>~quot~\"~apos~'~amp~&"); +exports.namedReferences.html4 = 
generateNamedReferences("apos~'~OElig~Œ~oelig~œ~Scaron~Š~scaron~š~Yuml~Ÿ~circ~ˆ~tilde~˜~ensp~ ~emsp~ ~thinsp~ ~zwnj~‌~zwj~‍~lrm~‎~rlm~‏~ndash~–~mdash~—~lsquo~‘~rsquo~’~sbquo~‚~ldquo~“~rdquo~”~bdquo~„~dagger~†~Dagger~‡~permil~‰~lsaquo~‹~rsaquo~›~euro~€~fnof~ƒ~Alpha~Α~Beta~Β~Gamma~Γ~Delta~Δ~Epsilon~Ε~Zeta~Ζ~Eta~Η~Theta~Θ~Iota~Ι~Kappa~Κ~Lambda~Λ~Mu~Μ~Nu~Ν~Xi~Ξ~Omicron~Ο~Pi~Π~Rho~Ρ~Sigma~Σ~Tau~Τ~Upsilon~Υ~Phi~Φ~Chi~Χ~Psi~Ψ~Omega~Ω~alpha~α~beta~β~gamma~γ~delta~δ~epsilon~ε~zeta~ζ~eta~η~theta~θ~iota~ι~kappa~κ~lambda~λ~mu~μ~nu~ν~xi~ξ~omicron~ο~pi~π~rho~ρ~sigmaf~ς~sigma~σ~tau~τ~upsilon~υ~phi~φ~chi~χ~psi~ψ~omega~ω~thetasym~ϑ~upsih~ϒ~piv~ϖ~bull~•~hellip~…~prime~′~Prime~″~oline~‾~frasl~⁄~weierp~℘~image~ℑ~real~ℜ~trade~™~alefsym~ℵ~larr~←~uarr~↑~rarr~→~darr~↓~harr~↔~crarr~↵~lArr~⇐~uArr~⇑~rArr~⇒~dArr~⇓~hArr~⇔~forall~∀~part~∂~exist~∃~empty~∅~nabla~∇~isin~∈~notin~∉~ni~∋~prod~∏~sum~∑~minus~−~lowast~∗~radic~√~prop~∝~infin~∞~ang~∠~and~∧~or~∨~cap~∩~cup~∪~int~∫~there4~∴~sim~∼~cong~≅~asymp~≈~ne~≠~equiv~≡~le~≤~ge~≥~sub~⊂~sup~⊃~nsub~⊄~sube~⊆~supe~⊇~oplus~⊕~otimes~⊗~perp~⊥~sdot~⋅~lceil~⌈~rceil~⌉~lfloor~⌊~rfloor~⌋~lang~〈~rang~〉~loz~◊~spades~♠~clubs~♣~hearts~♥~diams~♦~~nbsp~ ~iexcl~¡~cent~¢~pound~£~curren~¤~yen~¥~brvbar~¦~sect~§~uml~¨~copy~©~ordf~ª~laquo~«~not~¬~shy~­~reg~®~macr~¯~deg~°~plusmn~±~sup2~²~sup3~³~acute~´~micro~µ~para~¶~middot~·~cedil~¸~sup1~¹~ordm~º~raquo~»~frac14~¼~frac12~½~frac34~¾~iquest~¿~Agrave~À~Aacute~Á~Acirc~Â~Atilde~Ã~Auml~Ä~Aring~Å~AElig~Æ~Ccedil~Ç~Egrave~È~Eacute~É~Ecirc~Ê~Euml~Ë~Igrave~Ì~Iacute~Í~Icirc~Î~Iuml~Ï~ETH~Ð~Ntilde~Ñ~Ograve~Ò~Oacute~Ó~Ocirc~Ô~Otilde~Õ~Ouml~Ö~times~×~Oslash~Ø~Ugrave~Ù~Uacute~Ú~Ucirc~Û~Uuml~Ü~Yacute~Ý~THORN~Þ~szlig~ß~agrave~à~aacute~á~acirc~â~atilde~ã~auml~ä~aring~å~aelig~æ~ccedil~ç~egrave~è~eacute~é~ecirc~ê~euml~ë~igrave~ì~iacute~í~icirc~î~iuml~ï~eth~ð~ntilde~ñ~ograve~ò~oacute~ó~ocirc~ô~otilde~õ~ouml~ö~divide~÷~oslash~ø~ugrave~ù~uacute~ú~ucirc~û~uuml~ü~yacute~ý~thorn~þ~yuml~ÿ~quot~\"~amp~&~lt~<~gt~>"); +exports.namedReferences.html5 = 
generateNamedReferences("Abreve~Ă~Acy~А~Afr~𝔄~Amacr~Ā~And~⩓~Aogon~Ą~Aopf~𝔸~ApplyFunction~⁡~Ascr~𝒜~Assign~≔~Backslash~∖~Barv~⫧~Barwed~⌆~Bcy~Б~Because~∵~Bernoullis~ℬ~Bfr~𝔅~Bopf~𝔹~Breve~˘~Bscr~ℬ~Bumpeq~≎~CHcy~Ч~Cacute~Ć~Cap~⋒~CapitalDifferentialD~ⅅ~Cayleys~ℭ~Ccaron~Č~Ccirc~Ĉ~Cconint~∰~Cdot~Ċ~Cedilla~¸~CenterDot~·~Cfr~ℭ~CircleDot~⊙~CircleMinus~⊖~CirclePlus~⊕~CircleTimes~⊗~ClockwiseContourIntegral~∲~CloseCurlyDoubleQuote~”~CloseCurlyQuote~’~Colon~∷~Colone~⩴~Congruent~≡~Conint~∯~ContourIntegral~∮~Copf~ℂ~Coproduct~∐~CounterClockwiseContourIntegral~∳~Cross~⨯~Cscr~𝒞~Cup~⋓~CupCap~≍~DD~ⅅ~DDotrahd~⤑~DJcy~Ђ~DScy~Ѕ~DZcy~Џ~Darr~↡~Dashv~⫤~Dcaron~Ď~Dcy~Д~Del~∇~Dfr~𝔇~DiacriticalAcute~´~DiacriticalDot~˙~DiacriticalDoubleAcute~˝~DiacriticalGrave~`~DiacriticalTilde~˜~Diamond~⋄~DifferentialD~ⅆ~Dopf~𝔻~Dot~¨~DotDot~⃜~DotEqual~≐~DoubleContourIntegral~∯~DoubleDot~¨~DoubleDownArrow~⇓~DoubleLeftArrow~⇐~DoubleLeftRightArrow~⇔~DoubleLeftTee~⫤~DoubleLongLeftArrow~⟸~DoubleLongLeftRightArrow~⟺~DoubleLongRightArrow~⟹~DoubleRightArrow~⇒~DoubleRightTee~⊨~DoubleUpArrow~⇑~DoubleUpDownArrow~⇕~DoubleVerticalBar~∥~DownArrow~↓~DownArrowBar~⤓~DownArrowUpArrow~⇵~DownBreve~̑~DownLeftRightVector~⥐~DownLeftTeeVector~⥞~DownLeftVector~↽~DownLeftVectorBar~⥖~DownRightTeeVector~⥟~DownRightVector~⇁~DownRightVectorBar~⥗~DownTee~⊤~DownTeeArrow~↧~Downarrow~⇓~Dscr~𝒟~Dstrok~Đ~ENG~Ŋ~Ecaron~Ě~Ecy~Э~Edot~Ė~Efr~𝔈~Element~∈~Emacr~Ē~EmptySmallSquare~◻~EmptyVerySmallSquare~▫~Eogon~Ę~Eopf~𝔼~Equal~⩵~EqualTilde~≂~Equilibrium~⇌~Escr~ℰ~Esim~⩳~Exists~∃~ExponentialE~ⅇ~Fcy~Ф~Ffr~𝔉~FilledSmallSquare~◼~FilledVerySmallSquare~▪~Fopf~𝔽~ForAll~∀~Fouriertrf~ℱ~Fscr~ℱ~GJcy~Ѓ~Gammad~Ϝ~Gbreve~Ğ~Gcedil~Ģ~Gcirc~Ĝ~Gcy~Г~Gdot~Ġ~Gfr~𝔊~Gg~⋙~Gopf~𝔾~GreaterEqual~≥~GreaterEqualLess~⋛~GreaterFullEqual~≧~GreaterGreater~⪢~GreaterLess~≷~GreaterSlantEqual~⩾~GreaterTilde~≳~Gscr~𝒢~Gt~≫~HARDcy~Ъ~Hacek~ˇ~Hat~^~Hcirc~Ĥ~Hfr~ℌ~HilbertSpace~ℋ~Hopf~ℍ~HorizontalLine~─~Hscr~ℋ~Hstrok~Ħ~HumpDownHump~≎~HumpEqual~≏~IEcy~Е~IJlig~IJ~IOcy~Ё~Icy~И~Idot~İ~Ifr~ℑ~Im~ℑ~Imacr~Ī~ImaginaryI~ⅈ~Implies~⇒~Int~∬~Integral~∫~Intersection~⋂~InvisibleComma~⁣~InvisibleTimes~⁢~Iogon~Į~Iopf~𝕀~Iscr~ℐ~Itilde~Ĩ~Iukcy~І~Jcirc~Ĵ~Jcy~Й~Jfr~𝔍~Jopf~𝕁~Jscr~𝒥~Jsercy~Ј~Jukcy~Є~KHcy~Х~KJcy~Ќ~Kcedil~Ķ~Kcy~К~Kfr~𝔎~Kopf~𝕂~Kscr~𝒦~LJcy~Љ~Lacute~Ĺ~Lang~⟪~Laplacetrf~ℒ~Larr~↞~Lcaron~Ľ~Lcedil~Ļ~Lcy~Л~LeftAngleBracket~⟨~LeftArrow~←~LeftArrowBar~⇤~LeftArrowRightArrow~⇆~LeftCeiling~⌈~LeftDoubleBracket~⟦~LeftDownTeeVector~⥡~LeftDownVector~⇃~LeftDownVectorBar~⥙~LeftFloor~⌊~LeftRightArrow~↔~LeftRightVector~⥎~LeftTee~⊣~LeftTeeArrow~↤~LeftTeeVector~⥚~LeftTriangle~⊲~LeftTriangleBar~⧏~LeftTriangleEqual~⊴~LeftUpDownVector~⥑~LeftUpTeeVector~⥠~LeftUpVector~↿~LeftUpVectorBar~⥘~LeftVector~↼~LeftVectorBar~⥒~Leftarrow~⇐~Leftrightarrow~⇔~LessEqualGreater~⋚~LessFullEqual~≦~LessGreater~≶~LessLess~⪡~LessSlantEqual~⩽~LessTilde~≲~Lfr~𝔏~Ll~⋘~Lleftarrow~⇚~Lmidot~Ŀ~LongLeftArrow~⟵~LongLeftRightArrow~⟷~LongRightArrow~⟶~Longleftarrow~⟸~Longleftrightarrow~⟺~Longrightarrow~⟹~Lopf~𝕃~LowerLeftArrow~↙~LowerRightArrow~↘~Lscr~ℒ~Lsh~↰~Lstrok~Ł~Lt~≪~Map~⤅~Mcy~М~MediumSpace~ ~Mellintrf~ℳ~Mfr~𝔐~MinusPlus~∓~Mopf~𝕄~Mscr~ℳ~NJcy~Њ~Nacute~Ń~Ncaron~Ň~Ncedil~Ņ~Ncy~Н~NegativeMediumSpace~​~NegativeThickSpace~​~NegativeThinSpace~​~NegativeVeryThinSpace~​~NestedGreaterGreater~≫~NestedLessLess~≪~NewLine~\n~Nfr~𝔑~NoBreak~⁠~NonBreakingSpace~ 
~Nopf~ℕ~Not~⫬~NotCongruent~≢~NotCupCap~≭~NotDoubleVerticalBar~∦~NotElement~∉~NotEqual~≠~NotEqualTilde~≂̸~NotExists~∄~NotGreater~≯~NotGreaterEqual~≱~NotGreaterFullEqual~≧̸~NotGreaterGreater~≫̸~NotGreaterLess~≹~NotGreaterSlantEqual~⩾̸~NotGreaterTilde~≵~NotHumpDownHump~≎̸~NotHumpEqual~≏̸~NotLeftTriangle~⋪~NotLeftTriangleBar~⧏̸~NotLeftTriangleEqual~⋬~NotLess~≮~NotLessEqual~≰~NotLessGreater~≸~NotLessLess~≪̸~NotLessSlantEqual~⩽̸~NotLessTilde~≴~NotNestedGreaterGreater~⪢̸~NotNestedLessLess~⪡̸~NotPrecedes~⊀~NotPrecedesEqual~⪯̸~NotPrecedesSlantEqual~⋠~NotReverseElement~∌~NotRightTriangle~⋫~NotRightTriangleBar~⧐̸~NotRightTriangleEqual~⋭~NotSquareSubset~⊏̸~NotSquareSubsetEqual~⋢~NotSquareSuperset~⊐̸~NotSquareSupersetEqual~⋣~NotSubset~⊂⃒~NotSubsetEqual~⊈~NotSucceeds~⊁~NotSucceedsEqual~⪰̸~NotSucceedsSlantEqual~⋡~NotSucceedsTilde~≿̸~NotSuperset~⊃⃒~NotSupersetEqual~⊉~NotTilde~≁~NotTildeEqual~≄~NotTildeFullEqual~≇~NotTildeTilde~≉~NotVerticalBar~∤~Nscr~𝒩~Ocy~О~Odblac~Ő~Ofr~𝔒~Omacr~Ō~Oopf~𝕆~OpenCurlyDoubleQuote~“~OpenCurlyQuote~‘~Or~⩔~Oscr~𝒪~Otimes~⨷~OverBar~‾~OverBrace~⏞~OverBracket~⎴~OverParenthesis~⏜~PartialD~∂~Pcy~П~Pfr~𝔓~PlusMinus~±~Poincareplane~ℌ~Popf~ℙ~Pr~⪻~Precedes~≺~PrecedesEqual~⪯~PrecedesSlantEqual~≼~PrecedesTilde~≾~Product~∏~Proportion~∷~Proportional~∝~Pscr~𝒫~Qfr~𝔔~Qopf~ℚ~Qscr~𝒬~RBarr~⤐~Racute~Ŕ~Rang~⟫~Rarr~↠~Rarrtl~⤖~Rcaron~Ř~Rcedil~Ŗ~Rcy~Р~Re~ℜ~ReverseElement~∋~ReverseEquilibrium~⇋~ReverseUpEquilibrium~⥯~Rfr~ℜ~RightAngleBracket~⟩~RightArrow~→~RightArrowBar~⇥~RightArrowLeftArrow~⇄~RightCeiling~⌉~RightDoubleBracket~⟧~RightDownTeeVector~⥝~RightDownVector~⇂~RightDownVectorBar~⥕~RightFloor~⌋~RightTee~⊢~RightTeeArrow~↦~RightTeeVector~⥛~RightTriangle~⊳~RightTriangleBar~⧐~RightTriangleEqual~⊵~RightUpDownVector~⥏~RightUpTeeVector~⥜~RightUpVector~↾~RightUpVectorBar~⥔~RightVector~⇀~RightVectorBar~⥓~Rightarrow~⇒~Ropf~ℝ~RoundImplies~⥰~Rrightarrow~⇛~Rscr~ℛ~Rsh~↱~RuleDelayed~⧴~SHCHcy~Щ~SHcy~Ш~SOFTcy~Ь~Sacute~Ś~Sc~⪼~Scedil~Ş~Scirc~Ŝ~Scy~С~Sfr~𝔖~ShortDownArrow~↓~ShortLeftArrow~←~ShortRightArrow~→~ShortUpArrow~↑~SmallCircle~∘~Sopf~𝕊~Sqrt~√~Square~□~SquareIntersection~⊓~SquareSubset~⊏~SquareSubsetEqual~⊑~SquareSuperset~⊐~SquareSupersetEqual~⊒~SquareUnion~⊔~Sscr~𝒮~Star~⋆~Sub~⋐~Subset~⋐~SubsetEqual~⊆~Succeeds~≻~SucceedsEqual~⪰~SucceedsSlantEqual~≽~SucceedsTilde~≿~SuchThat~∋~Sum~∑~Sup~⋑~Superset~⊃~SupersetEqual~⊇~Supset~⋑~TRADE~™~TSHcy~Ћ~TScy~Ц~Tab~\t~Tcaron~Ť~Tcedil~Ţ~Tcy~Т~Tfr~𝔗~Therefore~∴~ThickSpace~  ~ThinSpace~ ~Tilde~∼~TildeEqual~≃~TildeFullEqual~≅~TildeTilde~≈~Topf~𝕋~TripleDot~⃛~Tscr~𝒯~Tstrok~Ŧ~Uarr~↟~Uarrocir~⥉~Ubrcy~Ў~Ubreve~Ŭ~Ucy~У~Udblac~Ű~Ufr~𝔘~Umacr~Ū~UnderBar~_~UnderBrace~⏟~UnderBracket~⎵~UnderParenthesis~⏝~Union~⋃~UnionPlus~⊎~Uogon~Ų~Uopf~𝕌~UpArrow~↑~UpArrowBar~⤒~UpArrowDownArrow~⇅~UpDownArrow~↕~UpEquilibrium~⥮~UpTee~⊥~UpTeeArrow~↥~Uparrow~⇑~Updownarrow~⇕~UpperLeftArrow~↖~UpperRightArrow~↗~Upsi~ϒ~Uring~Ů~Uscr~𝒰~Utilde~Ũ~VDash~⊫~Vbar~⫫~Vcy~В~Vdash~⊩~Vdashl~⫦~Vee~⋁~Verbar~‖~Vert~‖~VerticalBar~∣~VerticalLine~|~VerticalSeparator~❘~VerticalTilde~≀~VeryThinSpace~ 
~Vfr~𝔙~Vopf~𝕍~Vscr~𝒱~Vvdash~⊪~Wcirc~Ŵ~Wedge~⋀~Wfr~𝔚~Wopf~𝕎~Wscr~𝒲~Xfr~𝔛~Xopf~𝕏~Xscr~𝒳~YAcy~Я~YIcy~Ї~YUcy~Ю~Ycirc~Ŷ~Ycy~Ы~Yfr~𝔜~Yopf~𝕐~Yscr~𝒴~ZHcy~Ж~Zacute~Ź~Zcaron~Ž~Zcy~З~Zdot~Ż~ZeroWidthSpace~​~Zfr~ℨ~Zopf~ℤ~Zscr~𝒵~abreve~ă~ac~∾~acE~∾̳~acd~∿~acy~а~af~⁡~afr~𝔞~aleph~ℵ~amacr~ā~amalg~⨿~andand~⩕~andd~⩜~andslope~⩘~andv~⩚~ange~⦤~angle~∠~angmsd~∡~angmsdaa~⦨~angmsdab~⦩~angmsdac~⦪~angmsdad~⦫~angmsdae~⦬~angmsdaf~⦭~angmsdag~⦮~angmsdah~⦯~angrt~∟~angrtvb~⊾~angrtvbd~⦝~angsph~∢~angst~Å~angzarr~⍼~aogon~ą~aopf~𝕒~ap~≈~apE~⩰~apacir~⩯~ape~≊~apid~≋~approx~≈~approxeq~≊~ascr~𝒶~ast~*~asympeq~≍~awconint~∳~awint~⨑~bNot~⫭~backcong~≌~backepsilon~϶~backprime~‵~backsim~∽~backsimeq~⋍~barvee~⊽~barwed~⌅~barwedge~⌅~bbrk~⎵~bbrktbrk~⎶~bcong~≌~bcy~б~becaus~∵~because~∵~bemptyv~⦰~bepsi~϶~bernou~ℬ~beth~ℶ~between~≬~bfr~𝔟~bigcap~⋂~bigcirc~◯~bigcup~⋃~bigodot~⨀~bigoplus~⨁~bigotimes~⨂~bigsqcup~⨆~bigstar~★~bigtriangledown~▽~bigtriangleup~△~biguplus~⨄~bigvee~⋁~bigwedge~⋀~bkarow~⤍~blacklozenge~⧫~blacksquare~▪~blacktriangle~▴~blacktriangledown~▾~blacktriangleleft~◂~blacktriangleright~▸~blank~␣~blk12~▒~blk14~░~blk34~▓~block~█~bne~=⃥~bnequiv~≡⃥~bnot~⌐~bopf~𝕓~bot~⊥~bottom~⊥~bowtie~⋈~boxDL~╗~boxDR~╔~boxDl~╖~boxDr~╓~boxH~═~boxHD~╦~boxHU~╩~boxHd~╤~boxHu~╧~boxUL~╝~boxUR~╚~boxUl~╜~boxUr~╙~boxV~║~boxVH~╬~boxVL~╣~boxVR~╠~boxVh~╫~boxVl~╢~boxVr~╟~boxbox~⧉~boxdL~╕~boxdR~╒~boxdl~┐~boxdr~┌~boxh~─~boxhD~╥~boxhU~╨~boxhd~┬~boxhu~┴~boxminus~⊟~boxplus~⊞~boxtimes~⊠~boxuL~╛~boxuR~╘~boxul~┘~boxur~└~boxv~│~boxvH~╪~boxvL~╡~boxvR~╞~boxvh~┼~boxvl~┤~boxvr~├~bprime~‵~breve~˘~bscr~𝒷~bsemi~⁏~bsim~∽~bsime~⋍~bsol~\\~bsolb~⧅~bsolhsub~⟈~bullet~•~bump~≎~bumpE~⪮~bumpe~≏~bumpeq~≏~cacute~ć~capand~⩄~capbrcup~⩉~capcap~⩋~capcup~⩇~capdot~⩀~caps~∩︀~caret~⁁~caron~ˇ~ccaps~⩍~ccaron~č~ccirc~ĉ~ccups~⩌~ccupssm~⩐~cdot~ċ~cemptyv~⦲~centerdot~·~cfr~𝔠~chcy~ч~check~✓~checkmark~✓~cir~○~cirE~⧃~circeq~≗~circlearrowleft~↺~circlearrowright~↻~circledR~®~circledS~Ⓢ~circledast~⊛~circledcirc~⊚~circleddash~⊝~cire~≗~cirfnint~⨐~cirmid~⫯~cirscir~⧂~clubsuit~♣~colon~:~colone~≔~coloneq~≔~comma~,~commat~@~comp~∁~compfn~∘~complement~∁~complexes~ℂ~congdot~⩭~conint~∮~copf~𝕔~coprod~∐~copysr~℗~cross~✗~cscr~𝒸~csub~⫏~csube~⫑~csup~⫐~csupe~⫒~ctdot~⋯~cudarrl~⤸~cudarrr~⤵~cuepr~⋞~cuesc~⋟~cularr~↶~cularrp~⤽~cupbrcap~⩈~cupcap~⩆~cupcup~⩊~cupdot~⊍~cupor~⩅~cups~∪︀~curarr~↷~curarrm~⤼~curlyeqprec~⋞~curlyeqsucc~⋟~curlyvee~⋎~curlywedge~⋏~curvearrowleft~↶~curvearrowright~↷~cuvee~⋎~cuwed~⋏~cwconint~∲~cwint~∱~cylcty~⌭~dHar~⥥~daleth~ℸ~dash~‐~dashv~⊣~dbkarow~⤏~dblac~˝~dcaron~ď~dcy~д~dd~ⅆ~ddagger~‡~ddarr~⇊~ddotseq~⩷~demptyv~⦱~dfisht~⥿~dfr~𝔡~dharl~⇃~dharr~⇂~diam~⋄~diamond~⋄~diamondsuit~♦~die~¨~digamma~ϝ~disin~⋲~div~÷~divideontimes~⋇~divonx~⋇~djcy~ђ~dlcorn~⌞~dlcrop~⌍~dollar~$~dopf~𝕕~dot~˙~doteq~≐~doteqdot~≑~dotminus~∸~dotplus~∔~dotsquare~⊡~doublebarwedge~⌆~downarrow~↓~downdownarrows~⇊~downharpoonleft~⇃~downharpoonright~⇂~drbkarow~⤐~drcorn~⌟~drcrop~⌌~dscr~𝒹~dscy~ѕ~dsol~⧶~dstrok~đ~dtdot~⋱~dtri~▿~dtrif~▾~duarr~⇵~duhar~⥯~dwangle~⦦~dzcy~џ~dzigrarr~⟿~eDDot~⩷~eDot~≑~easter~⩮~ecaron~ě~ecir~≖~ecolon~≕~ecy~э~edot~ė~ee~ⅇ~efDot~≒~efr~𝔢~eg~⪚~egs~⪖~egsdot~⪘~el~⪙~elinters~⏧~ell~ℓ~els~⪕~elsdot~⪗~emacr~ē~emptyset~∅~emptyv~∅~emsp13~ ~emsp14~ 
~eng~ŋ~eogon~ę~eopf~𝕖~epar~⋕~eparsl~⧣~eplus~⩱~epsi~ε~epsiv~ϵ~eqcirc~≖~eqcolon~≕~eqsim~≂~eqslantgtr~⪖~eqslantless~⪕~equals~=~equest~≟~equivDD~⩸~eqvparsl~⧥~erDot~≓~erarr~⥱~escr~ℯ~esdot~≐~esim~≂~excl~!~expectation~ℰ~exponentiale~ⅇ~fallingdotseq~≒~fcy~ф~female~♀~ffilig~ffi~fflig~ff~ffllig~ffl~ffr~𝔣~filig~fi~fjlig~fj~flat~♭~fllig~fl~fltns~▱~fopf~𝕗~fork~⋔~forkv~⫙~fpartint~⨍~frac13~⅓~frac15~⅕~frac16~⅙~frac18~⅛~frac23~⅔~frac25~⅖~frac35~⅗~frac38~⅜~frac45~⅘~frac56~⅚~frac58~⅝~frac78~⅞~frown~⌢~fscr~𝒻~gE~≧~gEl~⪌~gacute~ǵ~gammad~ϝ~gap~⪆~gbreve~ğ~gcirc~ĝ~gcy~г~gdot~ġ~gel~⋛~geq~≥~geqq~≧~geqslant~⩾~ges~⩾~gescc~⪩~gesdot~⪀~gesdoto~⪂~gesdotol~⪄~gesl~⋛︀~gesles~⪔~gfr~𝔤~gg~≫~ggg~⋙~gimel~ℷ~gjcy~ѓ~gl~≷~glE~⪒~gla~⪥~glj~⪤~gnE~≩~gnap~⪊~gnapprox~⪊~gne~⪈~gneq~⪈~gneqq~≩~gnsim~⋧~gopf~𝕘~grave~`~gscr~ℊ~gsim~≳~gsime~⪎~gsiml~⪐~gtcc~⪧~gtcir~⩺~gtdot~⋗~gtlPar~⦕~gtquest~⩼~gtrapprox~⪆~gtrarr~⥸~gtrdot~⋗~gtreqless~⋛~gtreqqless~⪌~gtrless~≷~gtrsim~≳~gvertneqq~≩︀~gvnE~≩︀~hairsp~ ~half~½~hamilt~ℋ~hardcy~ъ~harrcir~⥈~harrw~↭~hbar~ℏ~hcirc~ĥ~heartsuit~♥~hercon~⊹~hfr~𝔥~hksearow~⤥~hkswarow~⤦~hoarr~⇿~homtht~∻~hookleftarrow~↩~hookrightarrow~↪~hopf~𝕙~horbar~―~hscr~𝒽~hslash~ℏ~hstrok~ħ~hybull~⁃~hyphen~‐~ic~⁣~icy~и~iecy~е~iff~⇔~ifr~𝔦~ii~ⅈ~iiiint~⨌~iiint~∭~iinfin~⧜~iiota~℩~ijlig~ij~imacr~ī~imagline~ℐ~imagpart~ℑ~imath~ı~imof~⊷~imped~Ƶ~in~∈~incare~℅~infintie~⧝~inodot~ı~intcal~⊺~integers~ℤ~intercal~⊺~intlarhk~⨗~intprod~⨼~iocy~ё~iogon~į~iopf~𝕚~iprod~⨼~iscr~𝒾~isinE~⋹~isindot~⋵~isins~⋴~isinsv~⋳~isinv~∈~it~⁢~itilde~ĩ~iukcy~і~jcirc~ĵ~jcy~й~jfr~𝔧~jmath~ȷ~jopf~𝕛~jscr~𝒿~jsercy~ј~jukcy~є~kappav~ϰ~kcedil~ķ~kcy~к~kfr~𝔨~kgreen~ĸ~khcy~х~kjcy~ќ~kopf~𝕜~kscr~𝓀~lAarr~⇚~lAtail~⤛~lBarr~⤎~lE~≦~lEg~⪋~lHar~⥢~lacute~ĺ~laemptyv~⦴~lagran~ℒ~langd~⦑~langle~⟨~lap~⪅~larrb~⇤~larrbfs~⤟~larrfs~⤝~larrhk~↩~larrlp~↫~larrpl~⤹~larrsim~⥳~larrtl~↢~lat~⪫~latail~⤙~late~⪭~lates~⪭︀~lbarr~⤌~lbbrk~❲~lbrace~{~lbrack~[~lbrke~⦋~lbrksld~⦏~lbrkslu~⦍~lcaron~ľ~lcedil~ļ~lcub~{~lcy~л~ldca~⤶~ldquor~„~ldrdhar~⥧~ldrushar~⥋~ldsh~↲~leftarrow~←~leftarrowtail~↢~leftharpoondown~↽~leftharpoonup~↼~leftleftarrows~⇇~leftrightarrow~↔~leftrightarrows~⇆~leftrightharpoons~⇋~leftrightsquigarrow~↭~leftthreetimes~⋋~leg~⋚~leq~≤~leqq~≦~leqslant~⩽~les~⩽~lescc~⪨~lesdot~⩿~lesdoto~⪁~lesdotor~⪃~lesg~⋚︀~lesges~⪓~lessapprox~⪅~lessdot~⋖~lesseqgtr~⋚~lesseqqgtr~⪋~lessgtr~≶~lesssim~≲~lfisht~⥼~lfr~𝔩~lg~≶~lgE~⪑~lhard~↽~lharu~↼~lharul~⥪~lhblk~▄~ljcy~љ~ll~≪~llarr~⇇~llcorner~⌞~llhard~⥫~lltri~◺~lmidot~ŀ~lmoust~⎰~lmoustache~⎰~lnE~≨~lnap~⪉~lnapprox~⪉~lne~⪇~lneq~⪇~lneqq~≨~lnsim~⋦~loang~⟬~loarr~⇽~lobrk~⟦~longleftarrow~⟵~longleftrightarrow~⟷~longmapsto~⟼~longrightarrow~⟶~looparrowleft~↫~looparrowright~↬~lopar~⦅~lopf~𝕝~loplus~⨭~lotimes~⨴~lowbar~_~lozenge~◊~lozf~⧫~lpar~(~lparlt~⦓~lrarr~⇆~lrcorner~⌟~lrhar~⇋~lrhard~⥭~lrtri~⊿~lscr~𝓁~lsh~↰~lsim~≲~lsime~⪍~lsimg~⪏~lsqb~[~lsquor~‚~lstrok~ł~ltcc~⪦~ltcir~⩹~ltdot~⋖~lthree~⋋~ltimes~⋉~ltlarr~⥶~ltquest~⩻~ltrPar~⦖~ltri~◃~ltrie~⊴~ltrif~◂~lurdshar~⥊~luruhar~⥦~lvertneqq~≨︀~lvnE~≨︀~mDDot~∺~male~♂~malt~✠~maltese~✠~map~↦~mapsto~↦~mapstodown~↧~mapstoleft~↤~mapstoup~↥~marker~▮~mcomma~⨩~mcy~м~measuredangle~∡~mfr~𝔪~mho~℧~mid~∣~midast~*~midcir~⫰~minusb~⊟~minusd~∸~minusdu~⨪~mlcp~⫛~mldr~…~mnplus~∓~models~⊧~mopf~𝕞~mp~∓~mscr~𝓂~mstpos~∾~multimap~⊸~mumap~⊸~nGg~⋙̸~nGt~≫⃒~nGtv~≫̸~nLeftarrow~⇍~nLeftrightarrow~⇎~nLl~⋘̸~nLt~≪⃒~nLtv~≪̸~nRightarrow~⇏~nVDash~⊯~nVdash~⊮~nacute~ń~nang~∠⃒~nap~≉~napE~⩰̸~napid~≋̸~napos~ʼn~napprox~≉~natur~♮~natural~♮~naturals~ℕ~nbump~≎̸~nbumpe~≏̸~ncap~⩃~ncaron~ň~ncedil~ņ~ncong~≇~ncongdot~⩭̸~ncup~⩂~ncy~н~neArr~⇗~nearhk~⤤~nearr~↗~nearrow~↗~nedot~≐̸~nequiv~≢~nesear~⤨~nesim~≂̸~nexist~∄~nexists~∄~nfr~𝔫~ngE~≧̸~nge~≱
~ngeq~≱~ngeqq~≧̸~ngeqslant~⩾̸~nges~⩾̸~ngsim~≵~ngt~≯~ngtr~≯~nhArr~⇎~nharr~↮~nhpar~⫲~nis~⋼~nisd~⋺~niv~∋~njcy~њ~nlArr~⇍~nlE~≦̸~nlarr~↚~nldr~‥~nle~≰~nleftarrow~↚~nleftrightarrow~↮~nleq~≰~nleqq~≦̸~nleqslant~⩽̸~nles~⩽̸~nless~≮~nlsim~≴~nlt~≮~nltri~⋪~nltrie~⋬~nmid~∤~nopf~𝕟~notinE~⋹̸~notindot~⋵̸~notinva~∉~notinvb~⋷~notinvc~⋶~notni~∌~notniva~∌~notnivb~⋾~notnivc~⋽~npar~∦~nparallel~∦~nparsl~⫽⃥~npart~∂̸~npolint~⨔~npr~⊀~nprcue~⋠~npre~⪯̸~nprec~⊀~npreceq~⪯̸~nrArr~⇏~nrarr~↛~nrarrc~⤳̸~nrarrw~↝̸~nrightarrow~↛~nrtri~⋫~nrtrie~⋭~nsc~⊁~nsccue~⋡~nsce~⪰̸~nscr~𝓃~nshortmid~∤~nshortparallel~∦~nsim~≁~nsime~≄~nsimeq~≄~nsmid~∤~nspar~∦~nsqsube~⋢~nsqsupe~⋣~nsubE~⫅̸~nsube~⊈~nsubset~⊂⃒~nsubseteq~⊈~nsubseteqq~⫅̸~nsucc~⊁~nsucceq~⪰̸~nsup~⊅~nsupE~⫆̸~nsupe~⊉~nsupset~⊃⃒~nsupseteq~⊉~nsupseteqq~⫆̸~ntgl~≹~ntlg~≸~ntriangleleft~⋪~ntrianglelefteq~⋬~ntriangleright~⋫~ntrianglerighteq~⋭~num~#~numero~№~numsp~ ~nvDash~⊭~nvHarr~⤄~nvap~≍⃒~nvdash~⊬~nvge~≥⃒~nvgt~>⃒~nvinfin~⧞~nvlArr~⤂~nvle~≤⃒~nvlt~<⃒~nvltrie~⊴⃒~nvrArr~⤃~nvrtrie~⊵⃒~nvsim~∼⃒~nwArr~⇖~nwarhk~⤣~nwarr~↖~nwarrow~↖~nwnear~⤧~oS~Ⓢ~oast~⊛~ocir~⊚~ocy~о~odash~⊝~odblac~ő~odiv~⨸~odot~⊙~odsold~⦼~ofcir~⦿~ofr~𝔬~ogon~˛~ogt~⧁~ohbar~⦵~ohm~Ω~oint~∮~olarr~↺~olcir~⦾~olcross~⦻~olt~⧀~omacr~ō~omid~⦶~ominus~⊖~oopf~𝕠~opar~⦷~operp~⦹~orarr~↻~ord~⩝~order~ℴ~orderof~ℴ~origof~⊶~oror~⩖~orslope~⩗~orv~⩛~oscr~ℴ~osol~⊘~otimesas~⨶~ovbar~⌽~par~∥~parallel~∥~parsim~⫳~parsl~⫽~pcy~п~percnt~%~period~.~pertenk~‱~pfr~𝔭~phiv~ϕ~phmmat~ℳ~phone~☎~pitchfork~⋔~planck~ℏ~planckh~ℎ~plankv~ℏ~plus~+~plusacir~⨣~plusb~⊞~pluscir~⨢~plusdo~∔~plusdu~⨥~pluse~⩲~plussim~⨦~plustwo~⨧~pm~±~pointint~⨕~popf~𝕡~pr~≺~prE~⪳~prap~⪷~prcue~≼~pre~⪯~prec~≺~precapprox~⪷~preccurlyeq~≼~preceq~⪯~precnapprox~⪹~precneqq~⪵~precnsim~⋨~precsim~≾~primes~ℙ~prnE~⪵~prnap~⪹~prnsim~⋨~profalar~⌮~profline~⌒~profsurf~⌓~propto~∝~prsim~≾~prurel~⊰~pscr~𝓅~puncsp~ 
~qfr~𝔮~qint~⨌~qopf~𝕢~qprime~⁗~qscr~𝓆~quaternions~ℍ~quatint~⨖~quest~?~questeq~≟~rAarr~⇛~rAtail~⤜~rBarr~⤏~rHar~⥤~race~∽̱~racute~ŕ~raemptyv~⦳~rangd~⦒~range~⦥~rangle~⟩~rarrap~⥵~rarrb~⇥~rarrbfs~⤠~rarrc~⤳~rarrfs~⤞~rarrhk~↪~rarrlp~↬~rarrpl~⥅~rarrsim~⥴~rarrtl~↣~rarrw~↝~ratail~⤚~ratio~∶~rationals~ℚ~rbarr~⤍~rbbrk~❳~rbrace~}~rbrack~]~rbrke~⦌~rbrksld~⦎~rbrkslu~⦐~rcaron~ř~rcedil~ŗ~rcub~}~rcy~р~rdca~⤷~rdldhar~⥩~rdquor~”~rdsh~↳~realine~ℛ~realpart~ℜ~reals~ℝ~rect~▭~rfisht~⥽~rfr~𝔯~rhard~⇁~rharu~⇀~rharul~⥬~rhov~ϱ~rightarrow~→~rightarrowtail~↣~rightharpoondown~⇁~rightharpoonup~⇀~rightleftarrows~⇄~rightleftharpoons~⇌~rightrightarrows~⇉~rightsquigarrow~↝~rightthreetimes~⋌~ring~˚~risingdotseq~≓~rlarr~⇄~rlhar~⇌~rmoust~⎱~rmoustache~⎱~rnmid~⫮~roang~⟭~roarr~⇾~robrk~⟧~ropar~⦆~ropf~𝕣~roplus~⨮~rotimes~⨵~rpar~)~rpargt~⦔~rppolint~⨒~rrarr~⇉~rscr~𝓇~rsh~↱~rsqb~]~rsquor~’~rthree~⋌~rtimes~⋊~rtri~▹~rtrie~⊵~rtrif~▸~rtriltri~⧎~ruluhar~⥨~rx~℞~sacute~ś~sc~≻~scE~⪴~scap~⪸~sccue~≽~sce~⪰~scedil~ş~scirc~ŝ~scnE~⪶~scnap~⪺~scnsim~⋩~scpolint~⨓~scsim~≿~scy~с~sdotb~⊡~sdote~⩦~seArr~⇘~searhk~⤥~searr~↘~searrow~↘~semi~;~seswar~⤩~setminus~∖~setmn~∖~sext~✶~sfr~𝔰~sfrown~⌢~sharp~♯~shchcy~щ~shcy~ш~shortmid~∣~shortparallel~∥~sigmav~ς~simdot~⩪~sime~≃~simeq~≃~simg~⪞~simgE~⪠~siml~⪝~simlE~⪟~simne~≆~simplus~⨤~simrarr~⥲~slarr~←~smallsetminus~∖~smashp~⨳~smeparsl~⧤~smid~∣~smile~⌣~smt~⪪~smte~⪬~smtes~⪬︀~softcy~ь~sol~/~solb~⧄~solbar~⌿~sopf~𝕤~spadesuit~♠~spar~∥~sqcap~⊓~sqcaps~⊓︀~sqcup~⊔~sqcups~⊔︀~sqsub~⊏~sqsube~⊑~sqsubset~⊏~sqsubseteq~⊑~sqsup~⊐~sqsupe~⊒~sqsupset~⊐~sqsupseteq~⊒~squ~□~square~□~squarf~▪~squf~▪~srarr~→~sscr~𝓈~ssetmn~∖~ssmile~⌣~sstarf~⋆~star~☆~starf~★~straightepsilon~ϵ~straightphi~ϕ~strns~¯~subE~⫅~subdot~⪽~subedot~⫃~submult~⫁~subnE~⫋~subne~⊊~subplus~⪿~subrarr~⥹~subset~⊂~subseteq~⊆~subseteqq~⫅~subsetneq~⊊~subsetneqq~⫋~subsim~⫇~subsub~⫕~subsup~⫓~succ~≻~succapprox~⪸~succcurlyeq~≽~succeq~⪰~succnapprox~⪺~succneqq~⪶~succnsim~⋩~succsim~≿~sung~♪~supE~⫆~supdot~⪾~supdsub~⫘~supedot~⫄~suphsol~⟉~suphsub~⫗~suplarr~⥻~supmult~⫂~supnE~⫌~supne~⊋~supplus~⫀~supset~⊃~supseteq~⊇~supseteqq~⫆~supsetneq~⊋~supsetneqq~⫌~supsim~⫈~supsub~⫔~supsup~⫖~swArr~⇙~swarhk~⤦~swarr~↙~swarrow~↙~swnwar~⤪~target~⌖~tbrk~⎴~tcaron~ť~tcedil~ţ~tcy~т~tdot~⃛~telrec~⌕~tfr~𝔱~therefore~∴~thetav~ϑ~thickapprox~≈~thicksim~∼~thkap~≈~thksim~∼~timesb~⊠~timesbar~⨱~timesd~⨰~tint~∭~toea~⤨~top~⊤~topbot~⌶~topcir~⫱~topf~𝕥~topfork~⫚~tosa~⤩~tprime~‴~triangle~▵~triangledown~▿~triangleleft~◃~trianglelefteq~⊴~triangleq~≜~triangleright~▹~trianglerighteq~⊵~tridot~◬~trie~≜~triminus~⨺~triplus~⨹~trisb~⧍~tritime~⨻~trpezium~⏢~tscr~𝓉~tscy~ц~tshcy~ћ~tstrok~ŧ~twixt~≬~twoheadleftarrow~↞~twoheadrightarrow~↠~uHar~⥣~ubrcy~ў~ubreve~ŭ~ucy~у~udarr~⇅~udblac~ű~udhar~⥮~ufisht~⥾~ufr~𝔲~uharl~↿~uharr~↾~uhblk~▀~ulcorn~⌜~ulcorner~⌜~ulcrop~⌏~ultri~◸~umacr~ū~uogon~ų~uopf~𝕦~uparrow~↑~updownarrow~↕~upharpoonleft~↿~upharpoonright~↾~uplus~⊎~upsi~υ~upuparrows~⇈~urcorn~⌝~urcorner~⌝~urcrop~⌎~uring~ů~urtri~◹~uscr~𝓊~utdot~⋰~utilde~ũ~utri~▵~utrif~▴~uuarr~⇈~uwangle~⦧~vArr~⇕~vBar~⫨~vBarv~⫩~vDash~⊨~vangrt~⦜~varepsilon~ϵ~varkappa~ϰ~varnothing~∅~varphi~ϕ~varpi~ϖ~varpropto~∝~varr~↕~varrho~ϱ~varsigma~ς~varsubsetneq~⊊︀~varsubsetneqq~⫋︀~varsupsetneq~⊋︀~varsupsetneqq~⫌︀~vartheta~ϑ~vartriangleleft~⊲~vartriangleright~⊳~vcy~в~vdash~⊢~vee~∨~veebar~⊻~veeeq~≚~vellip~⋮~verbar~|~vert~|~vfr~𝔳~vltri~⊲~vnsub~⊂⃒~vnsup~⊃⃒~vopf~𝕧~vprop~∝~vrtri~⊳~vscr~𝓋~vsubnE~⫋︀~vsubne~⊊︀~vsupnE~⫌︀~vsupne~⊋︀~vzigzag~⦚~wcirc~ŵ~wedbar~⩟~wedge~∧~wedgeq~≙~wfr~𝔴~wopf~𝕨~wp~℘~wr~≀~wreath~≀~wscr~𝓌~xcap~⋂~xcirc~◯~xcup~⋃~xdtri~▽~xfr~𝔵~xhArr~⟺~xharr~⟷~xlArr~⟸~xlarr~⟵~xmap~⟼~xnis~⋻~xodot~⨀~xopf~𝕩~xoplus~⨁~xot
ime~⨂~xrArr~⟹~xrarr~⟶~xscr~𝓍~xsqcup~⨆~xuplus~⨄~xutri~△~xvee~⋁~xwedge~⋀~yacy~я~ycirc~ŷ~ycy~ы~yfr~𝔶~yicy~ї~yopf~𝕪~yscr~𝓎~yucy~ю~zacute~ź~zcaron~ž~zcy~з~zdot~ż~zeetrf~ℨ~zfr~𝔷~zhcy~ж~zigrarr~⇝~zopf~𝕫~zscr~𝓏~~AMP~&~COPY~©~GT~>~LT~<~QUOT~\"~REG~®", exports.namedReferences['html4']); +//# sourceMappingURL=named-references.js.map + +/***/ }), + +/***/ 1057: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.numericUnicodeMap = void 0; +exports.numericUnicodeMap = { + 0: 65533, + 128: 8364, + 130: 8218, + 131: 402, + 132: 8222, + 133: 8230, + 134: 8224, + 135: 8225, + 136: 710, + 137: 8240, + 138: 352, + 139: 8249, + 140: 338, + 142: 381, + 145: 8216, + 146: 8217, + 147: 8220, + 148: 8221, + 149: 8226, + 150: 8211, + 151: 8212, + 152: 732, + 153: 8482, + 154: 353, + 155: 8250, + 156: 339, + 158: 382, + 159: 376 +}; +//# sourceMappingURL=numeric-unicode-map.js.map + +/***/ }), + +/***/ 9718: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.highSurrogateTo = exports.highSurrogateFrom = exports.getCodePoint = exports.fromCodePoint = void 0; +exports.fromCodePoint = String.fromCodePoint || + function (astralCodePoint) { + return String.fromCharCode(Math.floor((astralCodePoint - 0x10000) / 0x400) + 0xd800, ((astralCodePoint - 0x10000) % 0x400) + 0xdc00); + }; +// @ts-expect-error - String.prototype.codePointAt might not exist in older node versions +exports.getCodePoint = String.prototype.codePointAt + ? function (input, position) { + return input.codePointAt(position); + } + : function (input, position) { + return (input.charCodeAt(position) - 0xd800) * 0x400 + input.charCodeAt(position + 1) - 0xdc00 + 0x10000; + }; +exports.highSurrogateFrom = 0xd800; +exports.highSurrogateTo = 0xdbff; +//# sourceMappingURL=surrogate-pairs.js.map + +/***/ }), + +/***/ 1969: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* eslint-disable node/no-missing-require */ + +function getPackageJSON() { + return __nccwpck_require__(2721); +} + +exports.getPackageJSON = getPackageJSON; + + /***/ }), /***/ 4012: @@ -65417,6 +107270,30 @@ module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4. 
/***/ }), +/***/ 2721: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.16.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- --sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha 
build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"<4.1.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","duplexify":"^4.1.3","fast-xml-parser":"^4.4.1","gaxios":"^6.0.2","google-auth-library":"^9.6.3","html-entities":"^2.5.2","mime":"^3.0.0","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/duplexify":"^3.6.4","@types/mime":"^3.0.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^22.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^9.0.0","form-data":"^4.0.0","gapic-tools":"^0.4.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^3.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.5.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^18.0.0","nise":"6.0.0","path-to-regexp":"6.3.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); + +/***/ }), + +/***/ 6495: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"gaxios","version":"6.7.1","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"1.1.19","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^10.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cheerio":"1.0.0-rc.10","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","linkinator":"^3.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^19.0.0","sinon":"^18.0.0","stream-browserify":"^3.0.0","tmp":"0.2.3","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9","uuid":"^9.0.1"}}'); + +/***/ }), + +/***/ 6066: +/***/ ((module) => { + +"use strict"; +module.exports = /*#__PURE__*/JSON.parse('{"name":"google-auth-library","version":"9.15.1","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^17.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","cheerio":"1.0.0-rc.12","codecov":"^3.0.2","engine.io":"6.6.2","gts":"^5.0.0","is-docker":"^2.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","pdfmake":"0.2.12","puppeteer":"^21.0.0","sinon":"^18.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"jsdoc -c .jsdoc.json","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); + +/***/ }), + /***/ 1813: /***/ 
((module) => {